Video message recording improvements

Ilya Laktyushin 2024-01-12 11:16:22 +04:00
parent cd6acadf41
commit d3faf52a4c
50 changed files with 3129 additions and 492 deletions

View File

@ -10886,3 +10886,9 @@ Sorry for the inconvenience.";
"Conversation.ContactAddContact" = "ADD";
"Conversation.ContactMessage" = "MESSAGE";
"Chat.PlayOnceVideoMessageTooltip" = "This video message can only be played once.";
"Chat.PlayOnceVideoMessageYourTooltip" = "This message will disappear once **%@** plays it once.";
"Chat.TapToPlayVideoMessageOnceTooltip" = "Tap to set this message to **Play Once**";
"Chat.PlayVideoMessageOnceTooltip" = "The recipient will be able to play it only once.";

View File

@ -977,6 +977,8 @@ public protocol SharedAccountContext: AnyObject {
var enablePreloads: Promise<Bool> { get }
var hasPreloadBlockingContent: Promise<Bool> { get }
var deviceContactPhoneNumbers: Promise<Set<String>> { get }
var hasGroupCallOnScreen: Signal<Bool, NoError> { get }
var currentGroupCallController: ViewController? { get }

View File

@ -53,6 +53,7 @@ public final class ChatMessageItemAssociatedData: Equatable {
public let recommendedChannels: RecommendedChannels?
public let audioTranscriptionTrial: AudioTranscription.TrialState
public let chatThemes: [TelegramTheme]
public let deviceContactsNumbers: Set<String>
public let isStandalone: Bool
public init(
@ -81,6 +82,7 @@ public final class ChatMessageItemAssociatedData: Equatable {
recommendedChannels: RecommendedChannels? = nil,
audioTranscriptionTrial: AudioTranscription.TrialState = .defaultValue,
chatThemes: [TelegramTheme] = [],
deviceContactsNumbers: Set<String> = Set(),
isStandalone: Bool = false
) {
self.automaticDownloadPeerType = automaticDownloadPeerType
@ -108,6 +110,7 @@ public final class ChatMessageItemAssociatedData: Equatable {
self.recommendedChannels = recommendedChannels
self.audioTranscriptionTrial = audioTranscriptionTrial
self.chatThemes = chatThemes
self.deviceContactsNumbers = deviceContactsNumbers
self.isStandalone = isStandalone
}
@ -184,6 +187,9 @@ public final class ChatMessageItemAssociatedData: Equatable {
if lhs.chatThemes != rhs.chatThemes {
return false
}
if lhs.deviceContactsNumbers != rhs.deviceContactsNumbers {
return false
}
if lhs.isStandalone != rhs.isStandalone {
return false
}

View File

@ -50,11 +50,11 @@ final class CameraDeviceContext {
let input = CameraInput()
let output: CameraOutput
init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext) {
init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, use32BGRA: Bool = false) {
self.session = session
self.exclusive = exclusive
self.additional = additional
self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext)
self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA)
}
func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) {
@ -288,7 +288,6 @@ private final class CameraContext {
private var micLevelPeak: Int16 = 0
private var micLevelPeakCount = 0
private var isDualCameraEnabled: Bool?
public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) {
guard enabled != self.isDualCameraEnabled else {
@ -303,10 +302,10 @@ private final class CameraContext {
if enabled {
self.configure {
self.mainDeviceContext?.invalidate()
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext)
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext)
self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
}
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@ -343,7 +342,7 @@ private final class CameraContext {
self.additionalDeviceContext?.invalidate()
self.additionalDeviceContext = nil
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext)
self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: false)
self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
}
self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@ -476,6 +475,10 @@ private final class CameraContext {
self.mainDeviceContext?.device.setZoomDelta(zoomDelta)
}
func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) {
self.mainDeviceContext?.device.rampZoom(zoomLevel, rate: rate)
}
func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
@ -509,15 +512,19 @@ private final class CameraContext {
mainDeviceContext.device.setTorchMode(self._flashMode)
let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue, orientation: orientation),
additionalDeviceContext.output.startRecording(isDualCamera: true, orientation: .portrait)
) |> map { value, _ in
return value
}
if self.initialConfiguration.isRoundVideo {
return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output)
} else {
return mainDeviceContext.output.startRecording(isDualCamera: false, orientation: orientation)
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
mainDeviceContext.output.startRecording(mode: .dualCamera, position: self.positionValue, orientation: orientation),
additionalDeviceContext.output.startRecording(mode: .dualCamera, orientation: .portrait)
) |> map { value, _ in
return value
}
} else {
return mainDeviceContext.output.startRecording(mode: .default, orientation: orientation)
}
}
}
@ -525,41 +532,12 @@ private final class CameraContext {
guard let mainDeviceContext = self.mainDeviceContext else {
return .complete()
}
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
mainDeviceContext.output.stopRecording(),
additionalDeviceContext.output.stopRecording()
) |> mapToSignal { main, additional in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional {
var additionalThumbnailImage = additionalResult.thumbnail
if let cgImage = additionalResult.thumbnail.cgImage {
additionalThumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
}
return .single(
.finished(
main: mainResult,
additional: VideoCaptureResult.Result(path: additionalResult.path, thumbnail: additionalThumbnailImage, isMirrored: true, dimensions: additionalResult.dimensions),
duration: duration,
positionChangeTimestamps: positionChangeTimestamps,
captureTimestamp: CACurrentMediaTime()
)
)
} else {
return .complete()
}
}
} else {
let isMirrored = self.positionValue == .front
if self.initialConfiguration.isRoundVideo {
return mainDeviceContext.output.stopRecording()
|> map { result -> VideoCaptureResult in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, captureTimestamp) = result {
var thumbnailImage = mainResult.thumbnail
if isMirrored, let cgImage = thumbnailImage.cgImage {
thumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
}
return .finished(
main: VideoCaptureResult.Result(path: mainResult.path, thumbnail: thumbnailImage, isMirrored: isMirrored, dimensions: mainResult.dimensions),
main: mainResult,
additional: nil,
duration: duration,
positionChangeTimestamps: positionChangeTimestamps,
@ -569,6 +547,52 @@ private final class CameraContext {
return result
}
}
} else {
if let additionalDeviceContext = self.additionalDeviceContext {
return combineLatest(
mainDeviceContext.output.stopRecording(),
additionalDeviceContext.output.stopRecording()
) |> mapToSignal { main, additional in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional {
var additionalThumbnailImage = additionalResult.thumbnail
if let cgImage = additionalResult.thumbnail.cgImage {
additionalThumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
}
return .single(
.finished(
main: mainResult,
additional: VideoCaptureResult.Result(path: additionalResult.path, thumbnail: additionalThumbnailImage, isMirrored: true, dimensions: additionalResult.dimensions),
duration: duration,
positionChangeTimestamps: positionChangeTimestamps,
captureTimestamp: CACurrentMediaTime()
)
)
} else {
return .complete()
}
}
} else {
let isMirrored = self.positionValue == .front
return mainDeviceContext.output.stopRecording()
|> map { result -> VideoCaptureResult in
if case let .finished(mainResult, _, duration, positionChangeTimestamps, captureTimestamp) = result {
var thumbnailImage = mainResult.thumbnail
if isMirrored, let cgImage = thumbnailImage.cgImage {
thumbnailImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
}
return .finished(
main: VideoCaptureResult.Result(path: mainResult.path, thumbnail: thumbnailImage, isMirrored: isMirrored, dimensions: mainResult.dimensions),
additional: nil,
duration: duration,
positionChangeTimestamps: positionChangeTimestamps,
captureTimestamp: captureTimestamp
)
} else {
return result
}
}
}
}
}
@ -619,8 +643,9 @@ public final class Camera {
let preferWide: Bool
let preferLowerFramerate: Bool
let reportAudioLevel: Bool
let isRoundVideo: Bool
public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, reportAudioLevel: Bool = false) {
public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, reportAudioLevel: Bool = false, isRoundVideo: Bool = false) {
self.preset = preset
self.position = position
self.isDualEnabled = isDualEnabled
@ -630,6 +655,7 @@ public final class Camera {
self.preferWide = preferWide
self.preferLowerFramerate = preferLowerFramerate
self.reportAudioLevel = reportAudioLevel
self.isRoundVideo = isRoundVideo
}
}
@ -822,6 +848,14 @@ public final class Camera {
}
}
public func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.rampZoom(zoomLevel, rate: rate)
}
}
}
public func setTorchActive(_ active: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
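For orientation, a minimal sketch of how a caller might opt into the new round-video pipeline and use the added rampZoom API. The Configuration parameters and the rampZoom signature come from this diff; the nested Configuration type name, the preset case, and the surrounding camera setup are assumptions, not part of this commit.

// Hypothetical call site, assuming the Camera.Configuration type used
// elsewhere in this file and an already created `camera` instance.
func makeVideoMessageConfiguration() -> Camera.Configuration {
    return Camera.Configuration(
        preset: .hd1920x1080,   // assumed preset case
        position: .front,
        audio: true,
        photo: true,
        metadata: false,
        isRoundVideo: true      // new flag: 32BGRA frames + round-video recorder mode
    )
}

func zoomInSmoothly(_ camera: Camera) {
    // New public API added above: ramps the device's videoZoomFactor toward
    // 2x at the given rate instead of jumping to it immediately.
    camera.rampZoom(2.0, rate: 4.0)
}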

View File

@ -313,6 +313,15 @@ final class CameraDevice {
}
}
func rampZoom(_ zoomLevel: CGFloat, rate: CGFloat) {
guard let device = self.videoDevice else {
return
}
self.transaction(device) { device in
device.ramp(toVideoZoomFactor: zoomLevel, withRate: Float(rate))
}
}
func resetZoom(neutral: Bool = true) {
guard let device = self.videoDevice else {
return

View File

@ -80,6 +80,7 @@ public struct CameraCode: Equatable {
final class CameraOutput: NSObject {
let exclusive: Bool
let ciContext: CIContext
let isVideoMessage: Bool
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureVideoDataOutput()
@ -89,6 +90,8 @@ final class CameraOutput: NSObject {
private var photoConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var previewConnection: AVCaptureConnection?
private var roundVideoFilter: CameraRoundVideoFilter?
private let queue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
@ -99,10 +102,11 @@ final class CameraOutput: NSObject {
var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)?
var processAudioBuffer: ((CMSampleBuffer) -> Void)?
var processCodes: (([CameraCode]) -> Void)?
init(exclusive: Bool, ciContext: CIContext) {
init(exclusive: Bool, ciContext: CIContext, use32BGRA: Bool = false) {
self.exclusive = exclusive
self.ciContext = ciContext
self.isVideoMessage = use32BGRA
super.init()
@ -111,7 +115,7 @@ final class CameraOutput: NSObject {
}
self.videoOutput.alwaysDiscardsLateVideoFrames = false
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: use32BGRA ? kCVPixelFormatType_32BGRA : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
}
deinit {
@ -224,6 +228,7 @@ final class CameraOutput: NSObject {
if let videoDataOutputConnection = self.videoOutput.connection(with: .video) {
if videoDataOutputConnection.isVideoStabilizationSupported {
videoDataOutputConnection.preferredVideoStabilizationMode = .standard
// videoDataOutputConnection.preferredVideoStabilizationMode = self.isVideoMessage ? .cinematic : .standard
}
}
}
@ -282,68 +287,95 @@ final class CameraOutput: NSObject {
return self.videoRecorder != nil
}
enum RecorderMode {
case `default`
case roundVideo
case dualCamera
}
private var currentMode: RecorderMode = .default
private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
func startRecording(isDualCamera: Bool, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation) -> Signal<Double, NoError> {
func startRecording(mode: RecorderMode, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation, additionalOutput: CameraOutput? = nil) -> Signal<Double, NoError> {
guard self.videoRecorder == nil else {
return .complete()
}
self.currentMode = mode
let codecType: AVVideoCodecType
if hasHEVCHardwareEncoder {
codecType = .hevc
} else {
if case .roundVideo = mode {
codecType = .h264
} else {
if hasHEVCHardwareEncoder {
codecType = .hevc
} else {
codecType = .h264
}
}
guard let videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
return .complete()
}
let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
var dimensions: CGSize = CGSize(width: 1080, height: 1920)
if orientation == .landscapeLeft {
dimensions = CGSize(width: 1920, height: 1080)
} else if orientation == .landscapeRight {
if orientation == .landscapeLeft || orientation == .landscapeRight {
dimensions = CGSize(width: 1920, height: 1080)
}
var orientation = orientation
if case .roundVideo = mode {
videoSettings[AVVideoWidthKey] = 400
videoSettings[AVVideoHeightKey] = 400
dimensions = CGSize(width: 400, height: 400)
orientation = .landscapeRight
}
let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
let outputFileName = NSUUID().uuidString
let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
let outputFileURL = URL(fileURLWithPath: outputFilePath)
let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), ciContext: self.ciContext, orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
guard let self else {
return
}
if case let .success(transitionImage, duration, positionChangeTimestamps) = result {
self.recordingCompletionPipe.putNext(
.finished(
main: VideoCaptureResult.Result(
path: outputFilePath,
thumbnail: transitionImage ?? UIImage(),
isMirrored: false,
dimensions: dimensions
),
additional: nil,
duration: duration,
positionChangeTimestamps: positionChangeTimestamps.map { ($0 == .front, $1) },
captureTimestamp: CACurrentMediaTime()
let videoRecorder = VideoRecorder(
configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings),
ciContext: self.ciContext,
orientation: orientation,
fileUrl: outputFileURL,
completion: { [weak self] result in
guard let self else {
return
}
if case let .success(transitionImage, duration, positionChangeTimestamps) = result {
self.recordingCompletionPipe.putNext(
.finished(
main: VideoCaptureResult.Result(
path: outputFilePath,
thumbnail: transitionImage ?? UIImage(),
isMirrored: false,
dimensions: dimensions
),
additional: nil,
duration: duration,
positionChangeTimestamps: positionChangeTimestamps.map { ($0 == .front, $1) },
captureTimestamp: CACurrentMediaTime()
)
)
)
} else {
self.recordingCompletionPipe.putNext(.failed)
} else {
self.recordingCompletionPipe.putNext(.failed)
}
}
})
)
videoRecorder?.start()
self.videoRecorder = videoRecorder
if isDualCamera, let position {
if case .dualCamera = mode, let position {
videoRecorder?.markPositionChange(position: position, time: .zero)
} else if case .roundVideo = mode {
additionalOutput?.masterOutput = self
}
return Signal { subscriber in
let timer = SwiftSignalKit.Timer(timeout: 0.1, repeat: true, completion: { [weak videoRecorder] in
let timer = SwiftSignalKit.Timer(timeout: 0.02, repeat: true, completion: { [weak videoRecorder] in
subscriber.putNext(videoRecorder?.duration ?? 0.0)
}, queue: Queue.mainQueue())
timer.start()
@ -367,7 +399,86 @@ final class CameraOutput: NSObject {
}
}
private weak var masterOutput: CameraOutput?
func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
if case .roundVideo = self.currentMode {
if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, mirror: fromAdditionalOutput) {
if case .front = self.currentPosition {
if fromAdditionalOutput {
videoRecorder.appendSampleBuffer(processedSampleBuffer)
}
} else {
if !fromAdditionalOutput {
videoRecorder.appendSampleBuffer(processedSampleBuffer)
}
}
} else {
videoRecorder.appendSampleBuffer(sampleBuffer)
}
} else {
videoRecorder.appendSampleBuffer(sampleBuffer)
}
}
}
private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, mirror: Bool) -> CMSampleBuffer? {
guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
return nil
}
let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
var updatedExtensions = extensions
updatedExtensions["CVBytesPerRow"] = 400 * 4
var newFormatDescription: CMFormatDescription?
var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
guard status == noErr, let newFormatDescription else {
return nil
}
let filter: CameraRoundVideoFilter
if let current = self.roundVideoFilter {
filter = current
} else {
filter = CameraRoundVideoFilter(ciContext: self.ciContext)
self.roundVideoFilter = filter
}
if !filter.isPrepared {
filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
}
guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, mirror: mirror) else {
return nil
}
var sampleTimingInfo: CMSampleTimingInfo = .invalid
CMSampleBufferGetSampleTimingInfo(sampleBuffer, at: 0, timingInfoOut: &sampleTimingInfo)
var newSampleBuffer: CMSampleBuffer?
status = CMSampleBufferCreateForImageBuffer(
allocator: kCFAllocatorDefault,
imageBuffer: newPixelBuffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: newFormatDescription,
sampleTiming: &sampleTimingInfo,
sampleBufferOut: &newSampleBuffer
)
if status == noErr, let newSampleBuffer {
return newSampleBuffer
}
return nil
}
private var currentPosition: Camera.Position = .front
private var lastSwitchTimestamp: Double = 0.0
func markPositionChange(position: Camera.Position) {
self.currentPosition = position
if let videoRecorder = self.videoRecorder {
videoRecorder.markPositionChange(position: position)
}
@ -386,8 +497,10 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
// self.processAudioBuffer?(sampleBuffer)
}
if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
videoRecorder.appendSampleBuffer(sampleBuffer)
if let masterOutput = self.masterOutput {
masterOutput.processVideoRecording(sampleBuffer, fromAdditionalOutput: true)
} else {
self.processVideoRecording(sampleBuffer, fromAdditionalOutput: false)
}
}
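Taken together with the CameraContext changes above, the round-video recording flow is: the main output starts a single recorder in .roundVideo mode, registers itself as the master of the additional (front) output, and then keeps only the sample buffers matching the currently selected position, cropping each one to a 400x400 circle before appending. A condensed sketch using only names that appear in this diff; the wrapper function itself is illustrative.

// Illustrative wrapper (not part of this commit). Both outputs keep capturing;
// the additional output forwards its buffers to `main`, and processVideoRecording
// drops whichever side is not currently selected.
func startRoundVideoRecording(main: CameraOutput, additional: CameraOutput?) -> Signal<Double, NoError> {
    // The returned signal now reports the recorded duration every 0.02s.
    return main.startRecording(
        mode: .roundVideo,
        orientation: .portrait,        // remapped internally; output is a 400x400 square
        additionalOutput: additional
    )
}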

View File

@ -0,0 +1,181 @@
import Foundation
import UIKit
import CoreImage
import CoreMedia
import CoreVideo
import Metal
import Display
func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
outputBufferPool: CVPixelBufferPool?,
outputColorSpace: CGColorSpace?,
outputFormatDescription: CMFormatDescription?) {
let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription)
if inputMediaSubType != kCVPixelFormatType_32BGRA {
return (nil, nil, nil)
}
let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription)
var pixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: UInt(inputMediaSubType),
kCVPixelBufferWidthKey as String: Int(inputDimensions.width),
kCVPixelBufferHeightKey as String: Int(inputDimensions.height),
kCVPixelBufferIOSurfacePropertiesKey as String: [:] as NSDictionary
]
var cgColorSpace = CGColorSpaceCreateDeviceRGB()
if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? {
let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey]
if let colorPrimaries = colorPrimaries {
var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries]
if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] {
colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix
}
if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] {
colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction
}
pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties
}
if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey] {
cgColorSpace = cvColorspace as! CGColorSpace
} else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) {
cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)!
}
}
let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint]
var cvPixelBufferPool: CVPixelBufferPool?
CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool)
guard let pixelBufferPool = cvPixelBufferPool else {
return (nil, nil, nil)
}
preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint)
var pixelBuffer: CVPixelBuffer?
var outputFormatDescription: CMFormatDescription?
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary
CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: pixelBuffer,
formatDescriptionOut: &outputFormatDescription)
}
pixelBuffer = nil
return (pixelBufferPool, cgColorSpace, outputFormatDescription)
}
private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
var pixelBuffers = [CVPixelBuffer]()
var error: CVReturn = kCVReturnSuccess
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary
var pixelBuffer: CVPixelBuffer?
while error == kCVReturnSuccess {
error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
pixelBuffers.append(pixelBuffer)
}
pixelBuffer = nil
}
pixelBuffers.removeAll()
}
class CameraRoundVideoFilter {
private let ciContext: CIContext
private var resizeFilter: CIFilter?
private var compositeFilter: CIFilter?
private var outputColorSpace: CGColorSpace?
private var outputPixelBufferPool: CVPixelBufferPool?
private(set) var outputFormatDescription: CMFormatDescription?
private(set) var inputFormatDescription: CMFormatDescription?
private(set) var isPrepared = false
init(ciContext: CIContext) {
self.ciContext = ciContext
}
func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
self.reset()
(self.outputPixelBufferPool, self.outputColorSpace, self.outputFormatDescription) = allocateOutputBufferPool(with: formatDescription, outputRetainedBufferCountHint: outputRetainedBufferCountHint)
if self.outputPixelBufferPool == nil {
return
}
self.inputFormatDescription = formatDescription
let diameter: CGFloat = 400.0
let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in
let bounds = CGRect(origin: .zero, size: size)
context.clear(bounds)
context.setFillColor(UIColor.white.cgColor)
context.fill(bounds)
context.setBlendMode(.clear)
context.fillEllipse(in: bounds)
})!
self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")
self.compositeFilter = CIFilter(name: "CISourceOverCompositing")
self.compositeFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)
self.isPrepared = true
}
func reset() {
self.resizeFilter = nil
self.compositeFilter = nil
self.outputColorSpace = nil
self.outputPixelBufferPool = nil
self.outputFormatDescription = nil
self.inputFormatDescription = nil
self.isPrepared = false
}
func render(pixelBuffer: CVPixelBuffer, mirror: Bool) -> CVPixelBuffer? {
guard let resizeFilter = self.resizeFilter, let compositeFilter = self.compositeFilter, self.isPrepared else {
return nil
}
var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
sourceImage = sourceImage.oriented(mirror ? .leftMirrored : .right)
let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
if let resizedImage = resizeFilter.outputImage {
sourceImage = resizedImage
} else {
sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
}
sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
compositeFilter.setValue(sourceImage, forKey: kCIInputBackgroundImageKey)
let finalImage = compositeFilter.outputImage
guard let finalImage else {
return nil
}
var pbuf: CVPixelBuffer?
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
guard let outputPixelBuffer = pbuf else {
return nil
}
self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
return outputPixelBuffer
}
}
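A sketch of using the new filter on its own, assuming a 400x400 kCVPixelFormatType_32BGRA format description like the one CameraOutput builds in processRoundVideoSampleBuffer; in the real pipeline the filter instance is cached per output rather than created per frame.

// Illustrative standalone use of CameraRoundVideoFilter: prepare once per
// format, then render each frame into a 400x400 buffer whose corners
// (everything outside the circle) are filled white.
func roundCropped(_ pixelBuffer: CVPixelBuffer, format: CMFormatDescription, ciContext: CIContext, mirror: Bool) -> CVPixelBuffer? {
    let filter = CameraRoundVideoFilter(ciContext: ciContext)
    if !filter.isPrepared {
        // Allocates the output CVPixelBufferPool from the format's dimensions.
        filter.prepare(with: format, outputRetainedBufferCountHint: 3)
    }
    // `mirror` is true for front-camera frames so the result matches the preview.
    return filter.render(pixelBuffer: pixelBuffer, mirror: mirror)
}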

View File

@ -519,7 +519,7 @@ public final class VideoRecorder {
func markPositionChange(position: Camera.Position, time: CMTime? = nil) {
self.impl.markPositionChange(position: position, time: time)
}
func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
return

View File

@ -1,5 +1,6 @@
import Foundation
import UIKit
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
@ -146,34 +147,87 @@ public struct ChatSearchData: Equatable {
}
}
public final class ChatRecordedMediaPreview: Equatable {
public let resource: TelegramMediaResource
public let fileSize: Int32
public let duration: Int32
public let waveform: AudioWaveform
public init(resource: TelegramMediaResource, duration: Int32, fileSize: Int32, waveform: AudioWaveform) {
self.resource = resource
self.duration = duration
self.fileSize = fileSize
self.waveform = waveform
public enum ChatRecordedMediaPreview: Equatable {
public class Audio: Equatable {
public let resource: TelegramMediaResource
public let fileSize: Int32
public let duration: Int32
public let waveform: AudioWaveform
public init(
resource: TelegramMediaResource,
fileSize: Int32,
duration: Int32,
waveform: AudioWaveform
) {
self.resource = resource
self.fileSize = fileSize
self.duration = duration
self.waveform = waveform
}
public static func ==(lhs: Audio, rhs: Audio) -> Bool {
if !lhs.resource.isEqual(to: rhs.resource) {
return false
}
if lhs.duration != rhs.duration {
return false
}
if lhs.fileSize != rhs.fileSize {
return false
}
if lhs.waveform != rhs.waveform {
return false
}
return true
}
}
public static func ==(lhs: ChatRecordedMediaPreview, rhs: ChatRecordedMediaPreview) -> Bool {
if !lhs.resource.isEqual(to: rhs.resource) {
return false
public class Video: Equatable {
public class Control {
public let updateTrimRange: (Double, Double, Bool, Bool) -> Void
public init(updateTrimRange: @escaping (Double, Double, Bool, Bool) -> Void) {
self.updateTrimRange = updateTrimRange
}
}
if lhs.duration != rhs.duration {
return false
public let duration: Int32
public let frames: [UIImage]
public let framesUpdateTimestamp: Double
public let trimRange: Range<Double>?
public let control: Control
public init(
duration: Int32,
frames: [UIImage],
framesUpdateTimestamp: Double,
trimRange: Range<Double>?,
control: Control
) {
self.duration = duration
self.frames = frames
self.framesUpdateTimestamp = framesUpdateTimestamp
self.trimRange = trimRange
self.control = control
}
if lhs.fileSize != rhs.fileSize {
return false
public static func ==(lhs: Video, rhs: Video) -> Bool {
if lhs.duration != rhs.duration {
return false
}
if lhs.framesUpdateTimestamp != rhs.framesUpdateTimestamp {
return false
}
if lhs.trimRange != rhs.trimRange {
return false
}
return true
}
if lhs.waveform != rhs.waveform {
return false
}
return true
}
case audio(Audio)
case video(Video)
}
public struct ChatContactStatus: Equatable {
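ChatRecordedMediaPreview changes from a single audio-shaped class into a two-case enum, so consumers that previously read resource or duration directly now switch on the case. An illustrative consumer, not part of this commit:

func recordedPreviewDescription(_ preview: ChatRecordedMediaPreview) -> String {
    switch preview {
    case let .audio(audio):
        // Voice note: a finished file with a known size and waveform.
        return "voice message, \(audio.duration)s, \(audio.fileSize) bytes"
    case let .video(video):
        // Round video: scrubber frames plus an optional trim range.
        return "video message, \(video.duration)s, \(video.frames.count) preview frames"
    }
}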

View File

@ -87,8 +87,6 @@
- (void)_commitLocked;
- (void)lockImmediately;
- (void)setHidesPanelOnLock;
- (UIView *)createLockPanelView;

View File

@ -602,33 +602,6 @@ static const CGFloat outerCircleMinScale = innerCircleRadius / outerCircleRadius
return iconImage;
}
- (void)lockImmediately {
_lockView.lockness = 1.0;
[_lock updateLockness:1.0];
UIImage *icon = TGComponentsImageNamed(@"RecordSendIcon");
[self setIcon:TGTintedImage(icon, _pallete != nil && !_hidesPanelOnLock ? _pallete.iconColor : [UIColor whiteColor])];
_currentScale = 1;
_cancelTargetTranslation = 0;
id<TGModernConversationInputMicButtonDelegate> delegate = _delegate;
if ([delegate respondsToSelector:@selector(micButtonInteractionUpdateCancelTranslation:)])
[delegate micButtonInteractionUpdateCancelTranslation:-_cancelTargetTranslation];
_lockPanelView.frame = CGRectMake(_lockPanelView.frame.origin.x, 40.0f, _lockPanelView.frame.size.width, 72.0f - 32.0f);
_lockView.transform = CGAffineTransformMakeTranslation(0.0f, -11.0f);
_lock.transform = CGAffineTransformMakeTranslation(0.0f, -16.0f);
_lockArrowView.transform = CGAffineTransformMakeTranslation(0.0f, -39.0f);
_lockArrowView.alpha = 0.0f;
_stopButton.userInteractionEnabled = true;
[UIView animateWithDuration:0.25 delay:0.56 options:kNilOptions animations:^
{
_stopButton.alpha = 1.0f;
} completion:nil];
}
- (void)animateLock {
if (!_animatedIn) {
return;

View File

@ -9,7 +9,7 @@ public final class VideoMediaResourceAdjustments: PostboxCoding, Equatable {
public let digest: MemoryBuffer
public let isStory: Bool
public init(data: MemoryBuffer, digest: MemoryBuffer, isStory: Bool) {
public init(data: MemoryBuffer, digest: MemoryBuffer, isStory: Bool = false) {
self.data = data
self.digest = digest
self.isStory = isStory
@ -161,7 +161,7 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource {
}
public let randomId: Int64
public let path: String
public let paths: [String]
public let adjustments: VideoMediaResourceAdjustments?
public var headerSize: Int32 {
@ -170,19 +170,30 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource {
public init(randomId: Int64, path: String, adjustments: VideoMediaResourceAdjustments?) {
self.randomId = randomId
self.path = path
self.paths = [path]
self.adjustments = adjustments
}
public init(randomId: Int64, paths: [String], adjustments: VideoMediaResourceAdjustments?) {
self.randomId = randomId
self.paths = paths
self.adjustments = adjustments
}
public required init(decoder: PostboxDecoder) {
self.randomId = decoder.decodeInt64ForKey("i", orElse: 0)
self.path = decoder.decodeStringForKey("p", orElse: "")
let paths = decoder.decodeStringArrayForKey("ps")
if !paths.isEmpty {
self.paths = paths
} else {
self.paths = [decoder.decodeStringForKey("p", orElse: "")]
}
self.adjustments = decoder.decodeObjectForKey("a", decoder: { VideoMediaResourceAdjustments(decoder: $0) }) as? VideoMediaResourceAdjustments
}
public func encode(_ encoder: PostboxEncoder) {
encoder.encodeInt64(self.randomId, forKey: "i")
encoder.encodeString(self.path, forKey: "p")
encoder.encodeStringArray(self.paths, forKey: "ps")
if let adjustments = self.adjustments {
encoder.encodeObject(adjustments, forKey: "a")
} else {
@ -196,7 +207,7 @@ public final class LocalFileVideoMediaResource: TelegramMediaResource {
public func isEqual(to: MediaResource) -> Bool {
if let to = to as? LocalFileVideoMediaResource {
return self.randomId == to.randomId && self.path == to.path && self.adjustments == to.adjustments
return self.randomId == to.randomId && self.paths == to.paths && self.adjustments == to.adjustments
} else {
return false
}
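LocalFileVideoMediaResource now carries an array of file paths while staying backward compatible: the encoder writes the new "ps" string-array key, and the decoder falls back to the legacy single "p" path for resources persisted by older builds. A small construction sketch; the paths are placeholders, and treating the multiple files as recorded segments is an assumption, not stated in the diff.

// Single-file resource, as before (the path is wrapped into `paths` internally).
let single = LocalFileVideoMediaResource(randomId: 1, path: "/tmp/video.mp4", adjustments: nil)

// Multi-file resource, e.g. several recorded pieces to be treated as one video.
let segmented = LocalFileVideoMediaResource(randomId: 2, paths: ["/tmp/segment-0.mp4", "/tmp/segment-1.mp4"], adjustments: nil)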

View File

@ -111,6 +111,7 @@ swift_library(
"//submodules/InvisibleInkDustNode",
"//submodules/AlertUI",
"//submodules/TelegramUI/Components/Chat/MergedAvatarsNode",
"//submodules/TelegramUI/Components/LottieComponent",
],
visibility = [
"//visibility:public",

View File

@ -11,6 +11,7 @@ import BundleIconComponent
import BalancedTextComponent
import MultilineTextComponent
import SolidRoundedButtonComponent
import LottieComponent
import AccountContext
private final class SheetContent: CombinedComponent {
@ -59,7 +60,7 @@ private final class SheetContent: CombinedComponent {
let closeButton = Child(Button.self)
let iconBackground = Child(Image.self)
let icon = Child(BundleIconComponent.self)
let icon = Child(LottieComponent.self)
let title = Child(BalancedTextComponent.self)
let text = Child(BalancedTextComponent.self)
@ -106,13 +107,13 @@ private final class SheetContent: CombinedComponent {
let peerName = "Name"
switch component.subject {
case .presence:
iconName = "Premium/PrivacyPresence"
iconName = "PremiumPrivacyPresence"
titleString = "Show Your Last Seen"
textString = "To see **\(peerName)'s** Last Seen time, either start showing your own Last Seen Time..."
buttonTitle = "Show My Last Seen to Everyone"
premiumString = "Subscription will let you see **\(peerName)'s** Last Seen status without showing yours."
case .readTime:
iconName = "Premium/PrivacyReadTime"
iconName = "PremiumPrivacyRead"
titleString = "Show Your Read Date"
textString = "To see when **\(peerName)** read the message, either start showing your own read time:"
buttonTitle = "Show My Read Time"
@ -162,11 +163,20 @@ private final class SheetContent: CombinedComponent {
.position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + iconBackground.size.height / 2.0))
)
// let icon = icon.update(
// component: BundleIconComponent(name: iconName, tintColor: .white),
// availableSize: CGSize(width: 70.0, height: 70.0),
// transition: .immediate
// )
let icon = icon.update(
component: BundleIconComponent(name: iconName, tintColor: .white),
availableSize: CGSize(width: 70.0, height: 70.0),
component: LottieComponent(
content: LottieComponent.AppBundleContent(name: iconName)
),
availableSize: CGSize(width: 70, height: 70),
transition: .immediate
)
context.add(icon
.position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + iconBackground.size.height / 2.0))
)

View File

@ -208,6 +208,14 @@ public func combineLatest<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13
}, initialValues: [:], queue: queue)
}
public func combineLatest<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, E>(queue: Queue? = nil, _ s1: Signal<T1, E>, _ s2: Signal<T2, E>, _ s3: Signal<T3, E>, _ s4: Signal<T4, E>, _ s5: Signal<T5, E>, _ s6: Signal<T6, E>, _ s7: Signal<T7, E>, _ s8: Signal<T8, E>, _ s9: Signal<T9, E>, _ s10: Signal<T10, E>, _ s11: Signal<T11, E>, _ s12: Signal<T12, E>, _ s13: Signal<T13, E>, _ s14: Signal<T14, E>, _ s15: Signal<T15, E>, _ s16: Signal<T16, E>, _ s17: Signal<T17, E>, _ s18: Signal<T18, E>, _ s19: Signal<T19, E>, _ s20: Signal<T20, E>, _ s21: Signal<T21, E>) -> Signal<(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21), E> {
return combineLatestAny([signalOfAny(s1), signalOfAny(s2), signalOfAny(s3), signalOfAny(s4), signalOfAny(s5), signalOfAny(s6), signalOfAny(s7), signalOfAny(s8), signalOfAny(s9), signalOfAny(s10), signalOfAny(s11), signalOfAny(s12), signalOfAny(s13), signalOfAny(s14), signalOfAny(s15), signalOfAny(s16), signalOfAny(s17), signalOfAny(s18), signalOfAny(s19), signalOfAny(s20), signalOfAny(s21)], combine: { values in
return (values[0] as! T1, values[1] as! T2, values[2] as! T3, values[3] as! T4, values[4] as! T5, values[5] as! T6, values[6] as! T7, values[7] as! T8, values[8] as! T9, values[9] as! T10, values[10] as! T11, values[11] as! T12, values[12] as! T13, values[13] as! T14, values[14] as! T15, values[15] as! T16, values[16] as! T17, values[17] as! T18, values[18] as! T19, values[19] as! T20, values[20] as! T21)
}, initialValues: [:], queue: queue)
}
public func combineLatest<T, E>(queue: Queue? = nil, _ signals: [Signal<T, E>]) -> Signal<[T], E> {
if signals.count == 0 {
return single([T](), E.self)

View File

@ -96,6 +96,10 @@ public final class ScreenCaptureDetectionManager {
guard let strongSelf = self else {
return
}
var value = value
#if DEBUG
value = false
#endif
strongSelf.isRecordingActive = value
if value {
if strongSelf.screenRecordingCheckTimer == nil {

View File

@ -192,6 +192,9 @@ private enum ApplicationSpecificGlobalNotice: Int32 {
case voiceMessagesPlayOnceSuggestion = 58
case incomingVoiceMessagePlayOnceTip = 59
case outgoingVoiceMessagePlayOnceTip = 60
case videoMessagesPlayOnceSuggestion = 61
case incomingVideoMessagePlayOnceTip = 62
case outgoingVideoMessagePlayOnceTip = 63
var key: ValueBoxKey {
let v = ValueBoxKey(length: 4)
@ -489,6 +492,18 @@ private struct ApplicationSpecificNoticeKeys {
static func outgoingVoiceMessagePlayOnceTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.outgoingVoiceMessagePlayOnceTip.key)
}
static func videoMessagesPlayOnceSuggestion() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.videoMessagesPlayOnceSuggestion.key)
}
static func incomingVideoMessagePlayOnceTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.incomingVideoMessagePlayOnceTip.key)
}
static func outgoingVideoMessagePlayOnceTip() -> NoticeEntryKey {
return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.outgoingVideoMessagePlayOnceTip.key)
}
}
public struct ApplicationSpecificNotice {
@ -1984,4 +1999,85 @@ public struct ApplicationSpecificNotice {
return Int(previousValue)
}
}
public static func getVideoMessagesPlayOnceSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementVideoMessagesPlayOnceSuggestion(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.videoMessagesPlayOnceSuggestion(), entry)
}
return Int(previousValue)
}
}
public static func getIncomingVideoMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementIncomingVideoMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.incomingVideoMessagePlayOnceTip(), entry)
}
return Int(previousValue)
}
}
public static func getOutgoingVideoMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
return accountManager.transaction { transaction -> Int32 in
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
return value.value
} else {
return 0
}
}
}
public static func incrementOutgoingVideoMessagePlayOnceTip(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
return accountManager.transaction { transaction -> Int in
var currentValue: Int32 = 0
if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip())?.get(ApplicationSpecificCounterNotice.self) {
currentValue = value.value
}
let previousValue = currentValue
currentValue += Int32(count)
if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
transaction.setNotice(ApplicationSpecificNoticeKeys.outgoingVideoMessagePlayOnceTip(), entry)
}
return Int(previousValue)
}
}
}
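The three new notice counters follow the existing get/increment pattern. A sketch of a call site that shows a tooltip only the first couple of times, assuming SwiftSignalKit's deliverOnMainQueue and an accountManager in scope; the threshold and the helper itself are illustrative.

func maybeShowPlayOnceTooltip(
    accountManager: AccountManager<TelegramAccountManagerTypes>,
    present: @escaping () -> Void
) {
    let _ = (ApplicationSpecificNotice.getIncomingVideoMessagePlayOnceTip(accountManager: accountManager)
    |> deliverOnMainQueue).start(next: { count in
        guard count < 2 else {
            return
        }
        present()
        // Bump the counter so the tooltip is not shown indefinitely.
        let _ = ApplicationSpecificNotice.incrementIncomingVideoMessagePlayOnceTip(accountManager: accountManager).start()
    })
}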

View File

@ -422,6 +422,8 @@ swift_library(
"//submodules/TelegramUI/Components/Chat/ChatMessageNotificationItem",
"//submodules/Components/MultilineTextComponent",
"//submodules/TelegramUI/Components/PlainButtonComponent",
"//submodules/TelegramUI/Components/VideoMessageCameraScreen",
"//submodules/TelegramUI/Components/MediaScrubberComponent",
] + select({
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,
"//build-system:ios_sim_arm64": [],

View File

@ -1607,7 +1607,6 @@ public class CameraScreen: ViewController {
if case .pendingImage = value {
Queue.mainQueue().async {
self.mainPreviewView.isEnabled = false
self.additionalPreviewView.isEnabled = false
}
} else {

View File

@ -148,6 +148,9 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
var textString: NSAttributedString?
var updatedContactInfo: String?
var canMessage = false
var canAdd = false
var displayName: String = ""
if let selectedContact = selectedContact {
if !selectedContact.firstName.isEmpty && !selectedContact.lastName.isEmpty {
@ -161,6 +164,10 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
displayName = item.presentationData.strings.Message_Contact
}
if selectedContact.peerId != nil {
canMessage = true
}
let info: String
if let previousContact = previousContact, previousContact.isEqual(to: selectedContact), let contactInfo = previousContactInfo {
info = contactInfo
@ -199,6 +206,8 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
}
}
canAdd = !item.associatedData.deviceContactsNumbers.contains(selectedContact.phoneNumber)
updatedContactInfo = info
titleString = NSAttributedString(string: displayName, font: titleFont, textColor: mainColor)
@ -305,10 +314,10 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue {
maxContentWidth = max(maxContentWidth, statusSuggestedWidthAndContinue.0)
}
maxContentWidth = max(maxContentWidth, avatarSize.width + 7.0 + titleLayout.size.width)
maxContentWidth = max(maxContentWidth, avatarSize.width + 7.0 + textLayout.size.width)
maxContentWidth = max(maxContentWidth, 7.0 + avatarSize.width + 7.0 + titleLayout.size.width + 7.0)
maxContentWidth = max(maxContentWidth, 7.0 + avatarSize.width + 7.0 + textLayout.size.width + 7.0)
maxContentWidth = max(maxContentWidth, maxButtonWidth * 2.0)
maxContentWidth = max(maxContentWidth, 240.0)
maxContentWidth = max(maxContentWidth, 220.0)
let contentWidth = maxContentWidth + layoutConstants.text.bubbleInsets.right * 2.0
@ -316,7 +325,19 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
let baseAvatarFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right, y: layoutConstants.text.bubbleInsets.top), size: avatarSize)
let lineWidth: CGFloat = 3.0
let buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth) / 2.0)
var buttonCount = 0
if canMessage {
buttonCount += 1
}
if canAdd {
buttonCount += 1
}
var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth))
if buttonCount > 1 {
buttonWidth /= CGFloat(buttonCount)
}
let (messageButtonSize, messageButtonApply) = messageContinueLayout(buttonWidth, 33.0)
let (addButtonSize, addButtonApply) = addContinueLayout(buttonWidth, 33.0)
@ -329,7 +350,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
layoutSize.height += statusSizeAndApply.0.height - 4.0
}
let messageButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth, y: layoutSize.height - 24.0 - messageButtonSize.height), size: messageButtonSize)
let addButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth + buttonWidth, y: layoutSize.height - 24.0 - addButtonSize.height), size: addButtonSize)
let addButtonFrame = CGRect(origin: CGPoint(x: layoutConstants.text.bubbleInsets.right + lineWidth + (canMessage ? buttonWidth : 0.0), y: layoutSize.height - 24.0 - addButtonSize.height), size: addButtonSize)
let avatarFrame = baseAvatarFrame.offsetBy(dx: 9.0, dy: 14.0)
var customLetters: [String] = []
@ -362,9 +383,11 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
strongSelf.titleNode.frame = CGRect(origin: CGPoint(x: avatarFrame.maxX + 7.0, y: avatarFrame.minY + 1.0), size: titleLayout.size)
strongSelf.textNode.frame = CGRect(origin: CGPoint(x: avatarFrame.maxX + 7.0, y: avatarFrame.minY + 20.0), size: textLayout.size)
strongSelf.addButtonNode.frame = addButtonFrame
strongSelf.addButtonNode.isHidden = !canAdd
strongSelf.messageButtonNode.frame = messageButtonFrame
strongSelf.messageButtonNode.isHidden = !canMessage
let backgroundInsets = layoutConstants.text.bubbleInsets
let backgroundFrame = CGRect(origin: CGPoint(x: backgroundInsets.left, y: backgroundInsets.top + 5.0), size: CGSize(width: contentWidth - layoutConstants.text.bubbleInsets.right * 2.0, height: layoutSize.height - 34.0))
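The contact bubble now sizes its buttons from how many of them are actually visible: the full row when only one of MESSAGE/ADD is shown, split evenly when both are. A small helper mirroring that math; the helper and the example values are illustrative, not part of the commit.

// With insetRight = 3 and lineWidth = 3, a 240pt bubble gives a single
// button 231pt, or 115.5pt each when both MESSAGE and ADD are visible.
func contactButtonWidth(boundingWidth: CGFloat, insetRight: CGFloat, lineWidth: CGFloat, visibleButtons: Int) -> CGFloat {
    var width = floor(boundingWidth - insetRight * 2.0 - lineWidth)
    if visibleButtons > 1 {
        width /= CGFloat(visibleButtons)
    }
    return width
}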

View File

@ -179,6 +179,7 @@ public final class ChatTextInputMediaRecordingButton: TGModernConversationInputM
private let context: AccountContext
private var theme: PresentationTheme
private let useDarkTheme: Bool
private let pause: Bool
private let strings: PresentationStrings
public var mode: ChatTextInputMediaRecordingButtonMode = .audio
@ -322,17 +323,18 @@ public final class ChatTextInputMediaRecordingButton: TGModernConversationInputM
if let current = self.micLockValue {
return current
} else {
let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, useDarkTheme: self.useDarkTheme, strings: self.strings)
let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, useDarkTheme: self.useDarkTheme, pause: self.pause, strings: self.strings)
lockView.addTarget(self, action: #selector(handleStopTap), for: .touchUpInside)
self.micLockValue = lockView
return lockView
}
}
public init(context: AccountContext, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
public init(context: AccountContext, theme: PresentationTheme, useDarkTheme: Bool = false, pause: Bool = false, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
self.context = context
self.theme = theme
self.useDarkTheme = useDarkTheme
self.pause = pause
self.strings = strings
self.animationView = ComponentView<Empty>()
self.presentController = presentController
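The new pause flag simply threads through to LockView below, which then loads the LockPause animation instead of Lock, so the locked state shows a pause glyph rather than a stop glyph, presumably for video message recording, where locking pauses instead of stopping. Illustrative construction; the surrounding context, theme, and strings values are assumed to be in scope.

// Hypothetical call site: a recording button whose lock indicator shows pause.
let recordButton = ChatTextInputMediaRecordingButton(
    context: context,            // assumed AccountContext
    theme: presentationTheme,    // assumed PresentationTheme
    pause: true,                 // new parameter added in this commit
    strings: strings,
    presentController: { _ in }
)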

View File

@ -6,30 +6,33 @@ import TelegramPresentationData
final class LockView: UIButton, TGModernConversationInputMicButtonLock {
private let useDarkTheme: Bool
private let pause: Bool
private let idleView: AnimationView = {
guard let url = getAppBundle().url(forResource: "LockWait", withExtension: "json"), let animation = Animation.filepath(url.path)
else { return AnimationView() }
let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable))
view.loopMode = .autoReverse
view.backgroundColor = .clear
view.isOpaque = false
return view
}()
private let idleView: AnimationView
private let lockingView: AnimationView
private let lockingView: AnimationView = {
guard let url = getAppBundle().url(forResource: "LockPause", withExtension: "json"), let animation = Animation.filepath(url.path)
else { return AnimationView() }
let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable))
view.backgroundColor = .clear
view.isOpaque = false
return view
}()
init(frame: CGRect, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings) {
init(frame: CGRect, theme: PresentationTheme, useDarkTheme: Bool = false, pause: Bool = false, strings: PresentationStrings) {
self.useDarkTheme = useDarkTheme
self.pause = pause
if let url = getAppBundle().url(forResource: "LockWait", withExtension: "json"), let animation = Animation.filepath(url.path) {
let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable))
view.loopMode = .autoReverse
view.backgroundColor = .clear
view.isOpaque = false
self.idleView = view
} else {
self.idleView = AnimationView()
}
if let url = getAppBundle().url(forResource: self.pause ? "LockPause" : "Lock", withExtension: "json"), let animation = Animation.filepath(url.path) {
let view = AnimationView(animation: animation, configuration: LottieConfiguration(renderingEngine: .mainThread, decodingStrategy: .codable))
view.backgroundColor = .clear
view.isOpaque = false
self.lockingView = view
} else {
self.lockingView = AnimationView()
}
super.init(frame: frame)
@ -62,25 +65,33 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock {
}
func updateTheme(_ theme: PresentationTheme) {
[
"Rectangle.Заливка 1": theme.chat.inputPanel.panelBackgroundColor,
"Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Rectangle 2.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
].forEach { key, value in
idleView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
// [
// "Rectangle.Заливка 1": theme.chat.inputPanel.panelBackgroundColor,
// "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Rectangle 2.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
// ].forEach { key, value in
// idleView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
// }
//
for keypath in idleView.allKeypaths(predicate: { $0.keys.last == "Color" }) {
idleView.setValueProvider(ColorValueProvider(theme.chat.inputPanel.panelControlAccentColor.lottieColorValue), keypath: AnimationKeypath(keypath: keypath))
}
[
"Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Path.Path.Заливка 1": theme.chat.inputPanel.panelBackgroundColor.withAlphaComponent(1.0),
"Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
"Rectangle.Заливка 1": theme.chat.inputPanel.panelControlAccentColor,
"Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
].forEach { key, value in
lockingView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
for keypath in lockingView.allKeypaths(predicate: { $0.keys.last == "Color" }) {
lockingView.setValueProvider(ColorValueProvider(theme.chat.inputPanel.panelControlAccentColor.lottieColorValue), keypath: AnimationKeypath(keypath: keypath))
}
//
// [
// "Path.Path.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Path.Path.Заливка 1": theme.chat.inputPanel.panelBackgroundColor.withAlphaComponent(1.0),
// "Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Rectangle.Заливка 1": theme.chat.inputPanel.panelControlAccentColor,
// "Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
// ].forEach { key, value in
// lockingView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
// }
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {

View File

@ -1736,7 +1736,7 @@ public final class MediaEditor {
}
}
private func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> {
public func videoFrames(asset: AVAsset, count: Int, mirror: Bool = false) -> Signal<([UIImage], Double), NoError> {
func blurredImage(_ image: UIImage) -> UIImage? {
guard let image = image.cgImage else {
return nil

View File

@ -136,7 +136,7 @@ public enum MediaQualityPreset: Int32 {
case .compressedVeryHigh:
return 1920.0
case .videoMessage:
return 384.0
return 400.0
case .profileLow:
return 720.0
case .profile, .profileHigh, .profileVeryHigh:

View File

@@ -49,6 +49,7 @@ swift_library(
"//submodules/TelegramUI/Components/VolumeSliderContextItem",
"//submodules/TelegramUI/Components/Stories/ForwardInfoPanelComponent",
"//submodules/TelegramUI/Components/ContextReferenceButtonComponent",
"//submodules/TelegramUI/Components/MediaScrubberComponent",
],
visibility = [
"//visibility:public",

View File

@@ -39,6 +39,7 @@ import VolumeSliderContextItem
import TelegramStringFormatting
import ForwardInfoPanelComponent
import ContextReferenceButtonComponent
import MediaScrubberComponent
private let playbackButtonTag = GenericComponentViewTag()
private let muteButtonTag = GenericComponentViewTag()
@@ -1359,6 +1360,7 @@ final class MediaEditorScreenComponent: Component {
transition: scrubberTransition,
component: AnyComponent(MediaScrubberComponent(
context: component.context,
style: .editor,
generationTimestamp: playerState.generationTimestamp,
position: playerState.position,
minDuration: minDuration,
@@ -6081,3 +6083,23 @@ private func setupButtonShadow(_ view: UIView, radius: CGFloat = 2.0) {
view.layer.shadowColor = UIColor.black.cgColor
view.layer.shadowOpacity = 0.35
}
extension MediaScrubberComponent.Track {
public init(_ track: MediaEditorPlayerState.Track) {
let content: MediaScrubberComponent.Track.Content
switch track.content {
case let .video(frames, framesUpdateTimestamp):
content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp)
case let .audio(artist, title, samples, peak):
content = .audio(artist: artist, title: title, samples: samples, peak: peak)
}
self.init(
id: track.id,
content: content,
duration: track.duration,
trimRange: track.trimRange,
offset: track.offset,
isMain: track.isMain
)
}
}
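A hypothetical usage sketch (not part of this commit) of the now-public track model; `playerState` and `frameImages` are assumed names. Tracks can be bridged from `MediaEditorPlayerState.Track` as above or built directly with the memberwise initializer:
// Sketch: bridging existing editor player state into the public scrubber model.
let scrubberTracks = playerState.tracks.map { MediaScrubberComponent.Track($0) }
// Sketch: constructing a standalone video track via the new public initializer.
let videoTrack = MediaScrubberComponent.Track(
    id: 0,
    content: .video(frames: frameImages, framesUpdateTimestamp: CACurrentMediaTime()),
    duration: 12.0,
    trimRange: 0.0 ..< 10.0,
    offset: nil,
    isMain: true
)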

View File

@@ -0,0 +1,25 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "MediaScrubberComponent",
module_name = "MediaScrubberComponent",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/Display",
"//submodules/ComponentFlow",
"//submodules/TelegramPresentationData",
"//submodules/AccountContext",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/Components/MultilineTextComponent",
"//submodules/TelegramUI/Components/MediaEditor",
"//submodules/TelegramUI/Components/AudioWaveformComponent",
],
visibility = [
"//visibility:public",
],
)

View File

@@ -4,7 +4,6 @@ import Display
import AsyncDisplayKit
import ComponentFlow
import SwiftSignalKit
import ViewControllerComponent
import ComponentDisplayAdapters
import TelegramPresentationData
import AccountContext
@@ -17,17 +16,16 @@ private let trackHeight: CGFloat = 39.0
private let collapsedTrackHeight: CGFloat = 26.0
private let trackSpacing: CGFloat = 4.0
private let borderHeight: CGFloat = 1.0 + UIScreenPixel
private let frameWidth: CGFloat = 24.0
final class MediaScrubberComponent: Component {
typealias EnvironmentType = Empty
public final class MediaScrubberComponent: Component {
public typealias EnvironmentType = Empty
struct Track: Equatable {
enum Content: Equatable {
public struct Track: Equatable {
public enum Content: Equatable {
case video(frames: [UIImage], framesUpdateTimestamp: Double)
case audio(artist: String?, title: String?, samples: Data?, peak: Int32)
static func ==(lhs: Content, rhs: Content) -> Bool {
public static func ==(lhs: Content, rhs: Content) -> Bool {
switch lhs {
case let .video(_, framesUpdateTimestamp):
if case .video(_, framesUpdateTimestamp) = rhs {
@@ -45,29 +43,38 @@ final class MediaScrubberComponent: Component {
}
}
let id: Int32
let content: Content
let duration: Double
let trimRange: Range<Double>?
let offset: Double?
let isMain: Bool
init(_ track: MediaEditorPlayerState.Track) {
self.id = track.id
switch track.content {
case let .video(frames, framesUpdateTimestamp):
self.content = .video(frames: frames, framesUpdateTimestamp: framesUpdateTimestamp)
case let .audio(artist, title, samples, peak):
self.content = .audio(artist: artist, title: title, samples: samples, peak: peak)
}
self.duration = track.duration
self.trimRange = track.trimRange
self.offset = track.offset
self.isMain = track.isMain
public let id: Int32
public let content: Content
public let duration: Double
public let trimRange: Range<Double>?
public let offset: Double?
public let isMain: Bool
public init(
id: Int32,
content: Content,
duration: Double,
trimRange: Range<Double>?,
offset: Double?,
isMain: Bool
) {
self.id = id
self.content = content
self.duration = duration
self.trimRange = trimRange
self.offset = offset
self.isMain = isMain
}
}
public enum Style {
case editor
case videoMessage
}
let context: AccountContext
let style: Style
let generationTimestamp: Double
let position: Double
@@ -77,13 +84,14 @@ final class MediaScrubberComponent: Component {
let tracks: [Track]
let positionUpdated: (Double, Bool) -> Void
let trackTrimUpdated: (Int32, Double, Double, Bool, Bool) -> Void
let trackOffsetUpdated: (Int32, Double, Bool) -> Void
let trackLongPressed: (Int32, UIView) -> Void
init(
public init(
context: AccountContext,
style: Style,
generationTimestamp: Double,
position: Double,
minDuration: Double,
@@ -96,6 +104,7 @@ final class MediaScrubberComponent: Component {
trackLongPressed: @escaping (Int32, UIView) -> Void
) {
self.context = context
self.style = style
self.generationTimestamp = generationTimestamp
self.position = position
self.minDuration = minDuration
@@ -108,7 +117,7 @@ final class MediaScrubberComponent: Component {
self.trackLongPressed = trackLongPressed
}
static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool {
public static func ==(lhs: MediaScrubberComponent, rhs: MediaScrubberComponent) -> Bool {
if lhs.context !== rhs.context {
return false
}
@@ -133,7 +142,7 @@ final class MediaScrubberComponent: Component {
return true
}
final class View: UIView, UIGestureRecognizerDelegate {
public final class View: UIView, UIGestureRecognizerDelegate {
private var trackViews: [Int32: TrackView] = [:]
private let trimView: TrimView
private let ghostTrimView: TrimView
@@ -260,7 +269,7 @@ final class MediaScrubberComponent: Component {
guard let component = self.component, let firstTrack = component.tracks.first else {
return 0.0
}
return firstTrack.trimRange?.upperBound ?? min(firstTrack.duration, storyMaxVideoDuration)
return firstTrack.trimRange?.upperBound ?? min(firstTrack.duration, component.maxDuration)
}
private var mainAudioTrackOffset: Double? {
@@ -364,11 +373,18 @@ final class MediaScrubberComponent: Component {
self.cursorView.frame = cursorFrame(size: scrubberSize, height: self.effectiveCursorHeight, position: updatedPosition, duration: self.trimDuration)
}
func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: Transition) -> CGSize {
public func update(component: MediaScrubberComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: Transition) -> CGSize {
let isFirstTime = self.component == nil
self.component = component
self.state = state
switch component.style {
case .editor:
self.cursorView.isHidden = false
case .videoMessage:
self.cursorView.isHidden = true
}
var totalHeight: CGFloat = 0.0
var trackLayout: [Int32: (CGRect, Transition, Bool)] = [:]
@@ -427,6 +443,7 @@ final class MediaScrubberComponent: Component {
let trackSize = trackView.update(
context: component.context,
style: component.style,
track: track,
isSelected: id == self.selectedTrackId,
availableSize: availableSize,
@@ -495,12 +512,20 @@ final class MediaScrubberComponent: Component {
}
}
let scrubberSize = CGSize(width: availableSize.width, height: trackHeight)
let fullTrackHeight: CGFloat
switch component.style {
case .editor:
fullTrackHeight = trackHeight
case .videoMessage:
fullTrackHeight = 33.0
}
let scrubberSize = CGSize(width: availableSize.width, height: fullTrackHeight)
self.trimView.isHollow = self.selectedTrackId != lowestVideoId || self.isAudioOnly
let (leftHandleFrame, rightHandleFrame) = self.trimView.update(
style: component.style,
visualInsets: trimViewVisualInsets,
scrubberSize: CGSize(width: trackViewWidth, height: trackHeight),
scrubberSize: CGSize(width: trackViewWidth, height: fullTrackHeight),
duration: mainTrimDuration,
startPosition: startPosition,
endPosition: endPosition,
@@ -511,6 +536,7 @@ final class MediaScrubberComponent: Component {
)
let (ghostLeftHandleFrame, ghostRightHandleFrame) = self.ghostTrimView.update(
style: component.style,
visualInsets: .zero,
scrubberSize: CGSize(width: scrubberSize.width, height: collapsedTrackHeight),
duration: self.duration,
@@ -591,7 +617,7 @@ final class MediaScrubberComponent: Component {
return CGSize(width: availableSize.width, height: totalHeight)
}
override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
public override func point(inside point: CGPoint, with event: UIEvent?) -> Bool {
let hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
return self.bounds.inset(by: hitTestSlop).contains(point)
}
@@ -683,11 +709,9 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
self.videoTransparentFramesContainer.alpha = 0.5
self.videoTransparentFramesContainer.clipsToBounds = true
self.videoTransparentFramesContainer.layer.cornerRadius = 9.0
self.videoTransparentFramesContainer.isUserInteractionEnabled = false
self.videoOpaqueFramesContainer.clipsToBounds = true
self.videoOpaqueFramesContainer.layer.cornerRadius = 9.0
self.videoOpaqueFramesContainer.isUserInteractionEnabled = false
self.addSubview(self.clippingView)
@@ -760,6 +784,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
func update(
context: AccountContext,
style: MediaScrubberComponent.Style,
track: MediaScrubberComponent.Track,
isSelected: Bool,
availableSize: CGSize,
@@ -769,7 +794,20 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
let previousParams = self.params
self.params = (track, isSelected, duration)
let scrubberSize = CGSize(width: availableSize.width, height: isSelected ? trackHeight : collapsedTrackHeight)
let fullTrackHeight: CGFloat
let framesCornerRadius: CGFloat
switch style {
case .editor:
fullTrackHeight = trackHeight
framesCornerRadius = 9.0
case .videoMessage:
fullTrackHeight = 33.0
framesCornerRadius = fullTrackHeight / 2.0
}
self.videoTransparentFramesContainer.layer.cornerRadius = framesCornerRadius
self.videoOpaqueFramesContainer.layer.cornerRadius = framesCornerRadius
let scrubberSize = CGSize(width: availableSize.width, height: isSelected ? fullTrackHeight : collapsedTrackHeight)
var screenSpanDuration = duration
if track.isAudio && track.isMain {
@@ -891,11 +929,18 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
transparentFrameLayer = VideoFrameLayer()
transparentFrameLayer.masksToBounds = true
transparentFrameLayer.contentsGravity = .resizeAspectFill
if case .videoMessage = style {
transparentFrameLayer.contentsRect = CGRect(origin: .zero, size: CGSize(width: 1.0, height: 1.0)).insetBy(dx: 0.15, dy: 0.15)
}
self.videoTransparentFramesContainer.layer.addSublayer(transparentFrameLayer)
self.videoTransparentFrameLayers.append(transparentFrameLayer)
opaqueFrameLayer = VideoFrameLayer()
opaqueFrameLayer.masksToBounds = true
opaqueFrameLayer.contentsGravity = .resizeAspectFill
if case .videoMessage = style {
opaqueFrameLayer.contentsRect = CGRect(origin: .zero, size: CGSize(width: 1.0, height: 1.0)).insetBy(dx: 0.15, dy: 0.15)
}
self.videoOpaqueFramesContainer.layer.addSublayer(opaqueFrameLayer)
self.videoOpaqueFrameLayers.append(opaqueFrameLayer)
} else {
@@ -927,7 +972,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
if let image = frames.first, image.size.height > 0.0 {
frameAspectRatio = max(0.66, image.size.width / image.size.height)
}
let frameSize = CGSize(width: trackHeight * frameAspectRatio, height: trackHeight)
let frameSize = CGSize(width: fullTrackHeight * frameAspectRatio, height: fullTrackHeight)
var frameOffset: CGFloat = 0.0
for i in 0 ..< frames.count {
if i < self.videoTransparentFrameLayers.count {
@@ -1052,7 +1097,7 @@ private class TrackView: UIView, UIScrollViewDelegate, UIGestureRecognizerDelega
)
),
environment: {},
containerSize: CGSize(width: containerFrame.width, height: trackHeight)
containerSize: CGSize(width: containerFrame.width, height: fullTrackHeight)
)
if let view = self.audioWaveform.view as? AudioWaveformComponent.View {
if view.superview == nil {
@@ -1090,54 +1135,29 @@ private class TrimView: UIView {
override init(frame: CGRect) {
super.init(frame: .zero)
let height = trackHeight
let handleImage = generateImage(CGSize(width: handleWidth, height: height), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0)
context.addPath(path.cgPath)
context.fillPath()
context.setBlendMode(.clear)
let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0)
context.addPath(innerPath.cgPath)
context.fillPath()
})?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
self.zoneView.image = UIImage()
self.zoneView.isUserInteractionEnabled = true
self.zoneView.hitTestSlop = UIEdgeInsets(top: -8.0, left: 0.0, bottom: -8.0, right: 0.0)
self.leftHandleView.image = handleImage
self.leftHandleView.isUserInteractionEnabled = true
self.leftHandleView.tintColor = .white
self.leftHandleView.contentMode = .scaleToFill
self.leftHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
self.rightHandleView.image = handleImage
self.rightHandleView.transform = CGAffineTransform(scaleX: -1.0, y: 1.0)
self.rightHandleView.isUserInteractionEnabled = true
self.rightHandleView.tintColor = .white
self.rightHandleView.contentMode = .scaleToFill
self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
self.borderView.image = generateImage(CGSize(width: 1.0, height: height), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: height)))
})?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
self.borderView.tintColor = .white
self.borderView.isUserInteractionEnabled = false
self.leftCapsuleView.clipsToBounds = true
self.leftCapsuleView.layer.cornerRadius = 1.0
self.leftCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
self.rightCapsuleView.clipsToBounds = true
self.rightCapsuleView.layer.cornerRadius = 1.0
self.rightCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
self.addSubview(self.zoneView)
self.addSubview(self.leftHandleView)
@@ -1279,6 +1299,7 @@ private class TrimView: UIView {
)?
func update(
style: MediaScrubberComponent.Style,
visualInsets: UIEdgeInsets,
scrubberSize: CGSize,
duration: Double,
@@ -1288,34 +1309,101 @@ private class TrimView: UIView {
minDuration: Double,
maxDuration: Double,
transition: Transition
) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect)
{
) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect) {
let isFirstTime = self.params == nil
self.params = (scrubberSize, duration, startPosition, endPosition, position, minDuration, maxDuration)
let trimColor = self.isPanningTrimHandle ? UIColor(rgb: 0xf8d74a) : .white
let effectiveHandleWidth: CGFloat
let fullTrackHeight: CGFloat
let capsuleOffset: CGFloat
let color: UIColor
let highlightColor: UIColor
switch style {
case .editor:
effectiveHandleWidth = handleWidth
fullTrackHeight = trackHeight
capsuleOffset = 5.0 - UIScreenPixel
color = .white
highlightColor = UIColor(rgb: 0xf8d74a)
if isFirstTime {
self.borderView.image = generateImage(CGSize(width: 1.0, height: fullTrackHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
context.fill(CGRect(origin: .zero, size: CGSize(width: size.width, height: borderHeight)))
context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - borderHeight), size: CGSize(width: size.width, height: fullTrackHeight)))
})?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
let handleImage = generateImage(CGSize(width: handleWidth, height: fullTrackHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 9.0)
context.addPath(path.cgPath)
context.fillPath()
context.setBlendMode(.clear)
let innerPath = UIBezierPath(roundedRect: CGRect(origin: CGPoint(x: handleWidth - 3.0, y: borderHeight), size: CGSize(width: handleWidth, height: size.height - borderHeight * 2.0)), cornerRadius: 2.0)
context.addPath(innerPath.cgPath)
context.fillPath()
})?.withRenderingMode(.alwaysTemplate).resizableImage(withCapInsets: UIEdgeInsets(top: 10.0, left: 0.0, bottom: 10.0, right: 0.0))
self.leftHandleView.image = handleImage
self.rightHandleView.image = handleImage
self.leftCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
self.rightCapsuleView.backgroundColor = UIColor(rgb: 0x343436)
}
case .videoMessage:
effectiveHandleWidth = 16.0
fullTrackHeight = 33.0
capsuleOffset = 8.0
color = UIColor(rgb: 0x3478f6)
highlightColor = UIColor(rgb: 0x3478f6)
if isFirstTime {
let handleImage = generateImage(CGSize(width: effectiveHandleWidth, height: fullTrackHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
let path = UIBezierPath(roundedRect: CGRect(origin: .zero, size: CGSize(width: size.width * 2.0, height: size.height)), cornerRadius: 16.5)
context.addPath(path.cgPath)
context.fillPath()
})?.withRenderingMode(.alwaysTemplate)
self.leftHandleView.image = handleImage
self.rightHandleView.image = handleImage
self.leftCapsuleView.backgroundColor = .white
self.rightCapsuleView.backgroundColor = .white
}
}
let trimColor = self.isPanningTrimHandle ? highlightColor : color
transition.setTintColor(view: self.leftHandleView, color: trimColor)
transition.setTintColor(view: self.rightHandleView, color: trimColor)
transition.setTintColor(view: self.borderView, color: trimColor)
let totalWidth = scrubberSize.width
let totalRange = totalWidth - handleWidth
let totalRange = totalWidth - effectiveHandleWidth
let leftHandlePositionFraction = duration > 0.0 ? startPosition / duration : 0.0
let leftHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * leftHandlePositionFraction)
let leftHandlePosition = floorToScreenPixels(effectiveHandleWidth / 2.0 + totalRange * leftHandlePositionFraction)
var leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - handleWidth / 2.0, y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
var leftHandleFrame = CGRect(origin: CGPoint(x: leftHandlePosition - effectiveHandleWidth / 2.0, y: 0.0), size: CGSize(width: effectiveHandleWidth, height: scrubberSize.height))
leftHandleFrame.origin.x = max(leftHandleFrame.origin.x, visualInsets.left)
transition.setFrame(view: self.leftHandleView, frame: leftHandleFrame)
let rightHandlePositionFraction = duration > 0.0 ? endPosition / duration : 1.0
let rightHandlePosition = floorToScreenPixels(handleWidth / 2.0 + totalRange * rightHandlePositionFraction)
let rightHandlePosition = floorToScreenPixels(effectiveHandleWidth / 2.0 + totalRange * rightHandlePositionFraction)
var rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - handleWidth / 2.0), y: 0.0), size: CGSize(width: handleWidth, height: scrubberSize.height))
rightHandleFrame.origin.x = min(rightHandleFrame.origin.x, totalWidth - visualInsets.right - handleWidth)
var rightHandleFrame = CGRect(origin: CGPoint(x: max(leftHandleFrame.maxX, rightHandlePosition - effectiveHandleWidth / 2.0), y: 0.0), size: CGSize(width: effectiveHandleWidth, height: scrubberSize.height))
rightHandleFrame.origin.x = min(rightHandleFrame.origin.x, totalWidth - visualInsets.right - effectiveHandleWidth)
transition.setFrame(view: self.rightHandleView, frame: rightHandleFrame)
let capsuleSize = CGSize(width: 2.0, height: 11.0)
transition.setFrame(view: self.leftCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
transition.setFrame(view: self.rightCapsuleView, frame: CGRect(origin: CGPoint(x: 5.0 - UIScreenPixel, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
transition.setFrame(view: self.leftCapsuleView, frame: CGRect(origin: CGPoint(x: capsuleOffset, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
transition.setFrame(view: self.rightCapsuleView, frame: CGRect(origin: CGPoint(x: capsuleOffset, y: floorToScreenPixels((leftHandleFrame.height - capsuleSize.height) / 2.0)), size: capsuleSize))
let zoneFrame = CGRect(x: leftHandleFrame.maxX, y: 0.0, width: rightHandleFrame.minX - leftHandleFrame.maxX, height: scrubberSize.height)
transition.setFrame(view: self.zoneView, frame: zoneFrame)
@@ -1345,7 +1433,7 @@ private class VideoFrameLayer: SimpleShapeLayer {
override func layoutSublayers() {
super.layoutSublayers()
if self.stripeLayer.superlayer == nil {
self.stripeLayer.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.3).cgColor
self.addSublayer(self.stripeLayer)

View File

@@ -333,6 +333,7 @@ public final class MessageInputActionButtonComponent: Component {
context: component.context,
theme: defaultDarkPresentationTheme,
useDarkTheme: true,
pause: false,
strings: component.strings,
presentController: component.presentController
)

View File

@@ -104,7 +104,7 @@ public final class MediaPreviewPanelComponent: Component {
if lhs.strings !== rhs.strings {
return false
}
if lhs.mediaPreview !== rhs.mediaPreview {
if lhs.mediaPreview != rhs.mediaPreview {
return false
}
if lhs.insets != rhs.insets {
@@ -199,7 +199,7 @@ public final class MediaPreviewPanelComponent: Component {
}
@objc private func playPauseButtonPressed() {
guard let component = self.component else {
guard let component = self.component, case let .audio(audio) = component.mediaPreview else {
return
}
@@ -212,7 +212,7 @@ public final class MediaPreviewPanelComponent: Component {
postbox: component.context.account.postbox,
userLocation: .other,
userContentType: .audio,
resourceReference: .standalone(resource: component.mediaPreview.resource),
resourceReference: .standalone(resource: audio.resource),
streamable: .none,
video: false,
preferSoftwareDecoding: false,
@@ -231,8 +231,8 @@ public final class MediaPreviewPanelComponent: Component {
}
func update(component: MediaPreviewPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
if self.component == nil {
self.timerTextValue = textForDuration(seconds: component.mediaPreview.duration)
if self.component == nil, case let .audio(audio) = component.mediaPreview {
self.timerTextValue = textForDuration(seconds: audio.duration)
}
self.component = component
@@ -263,69 +263,71 @@ public final class MediaPreviewPanelComponent: Component {
let waveformFrame = CGRect(origin: CGPoint(x: component.insets.left + 47.0, y: component.insets.top + floor((availableSize.height - component.insets.top - component.insets.bottom - 24.0) * 0.5)), size: CGSize(width: availableSize.width - component.insets.right - 47.0 - (component.insets.left + 47.0), height: 24.0))
let _ = self.waveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: UIColor.white.withAlphaComponent(0.1),
foregroundColor: UIColor.white.withAlphaComponent(1.0),
shimmerColor: nil,
style: .middle,
samples: component.mediaPreview.waveform.samples,
peak: component.mediaPreview.waveform.peak,
status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in
if let value {
return value
} else {
return MediaPlayerStatus(
generationTimestamp: 0.0,
duration: 0.0,
dimensions: CGSize(),
timestamp: 0.0,
baseRate: 1.0,
seekId: 0,
status: .paused,
soundEnabled: true
)
if case let .audio(audio) = component.mediaPreview {
let _ = self.waveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: UIColor.white.withAlphaComponent(0.1),
foregroundColor: UIColor.white.withAlphaComponent(1.0),
shimmerColor: nil,
style: .middle,
samples: audio.waveform.samples,
peak: audio.waveform.peak,
status: self.mediaPlayerStatus.get() |> map { value -> MediaPlayerStatus in
if let value {
return value
} else {
return MediaPlayerStatus(
generationTimestamp: 0.0,
duration: 0.0,
dimensions: CGSize(),
timestamp: 0.0,
baseRate: 1.0,
seekId: 0,
status: .paused,
soundEnabled: true
)
}
},
isViewOnceMessage: false,
seek: { [weak self] timestamp in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
mediaPlayer.seek(timestamp: timestamp)
},
updateIsSeeking: { [weak self] isSeeking in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
if isSeeking {
mediaPlayer.pause()
} else {
mediaPlayer.play()
}
}
},
isViewOnceMessage: false,
seek: { [weak self] timestamp in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
mediaPlayer.seek(timestamp: timestamp)
},
updateIsSeeking: { [weak self] isSeeking in
guard let self, let mediaPlayer = self.mediaPlayer else {
return
}
if isSeeking {
mediaPlayer.pause()
} else {
mediaPlayer.play()
}
}
)),
environment: {},
containerSize: waveformFrame.size
)
let _ = self.vibrancyWaveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: .white,
foregroundColor: .white,
shimmerColor: nil,
style: .middle,
samples: component.mediaPreview.waveform.samples,
peak: component.mediaPreview.waveform.peak,
status: .complete(),
isViewOnceMessage: false,
seek: nil,
updateIsSeeking: nil
)),
environment: {},
containerSize: waveformFrame.size
)
)),
environment: {},
containerSize: waveformFrame.size
)
let _ = self.vibrancyWaveform.update(
transition: transition,
component: AnyComponent(AudioWaveformComponent(
backgroundColor: .white,
foregroundColor: .white,
shimmerColor: nil,
style: .middle,
samples: audio.waveform.samples,
peak: audio.waveform.peak,
status: .complete(),
isViewOnceMessage: false,
seek: nil,
updateIsSeeking: nil
)),
environment: {},
containerSize: waveformFrame.size
)
}
if let waveformView = self.waveform.view as? AudioWaveformComponent.View {
if waveformView.superview == nil {

View File

@@ -313,7 +313,7 @@ public final class MessageInputPanelComponent: Component {
if lhs.wasRecordingDismissed != rhs.wasRecordingDismissed {
return false
}
if lhs.recordedAudioPreview !== rhs.recordedAudioPreview {
if lhs.recordedAudioPreview != rhs.recordedAudioPreview {
return false
}
if lhs.hasRecordedVideoPreview != rhs.hasRecordedVideoPreview {

View File

@@ -16,6 +16,8 @@ public final class PlainButtonComponent: Component {
public let contentInsets: UIEdgeInsets
public let action: () -> Void
public let isEnabled: Bool
public let animateAlpha: Bool
public let tag: AnyObject?
public init(
content: AnyComponent<Empty>,
@@ -23,7 +25,9 @@ public final class PlainButtonComponent: Component {
minSize: CGSize? = nil,
contentInsets: UIEdgeInsets = UIEdgeInsets(),
action: @escaping () -> Void,
isEnabled: Bool = true
isEnabled: Bool = true,
animateAlpha: Bool = true,
tag: AnyObject? = nil
) {
self.content = content
self.effectAlignment = effectAlignment
@@ -31,8 +35,10 @@ public final class PlainButtonComponent: Component {
self.contentInsets = contentInsets
self.action = action
self.isEnabled = isEnabled
self.animateAlpha = animateAlpha
self.tag = tag
}
public static func ==(lhs: PlainButtonComponent, rhs: PlainButtonComponent) -> Bool {
if lhs.content != rhs.content {
return false
@@ -49,10 +55,26 @@ public final class PlainButtonComponent: Component {
if lhs.isEnabled != rhs.isEnabled {
return false
}
if lhs.animateAlpha != rhs.animateAlpha {
return false
}
if lhs.tag !== rhs.tag {
return false
}
return true
}
public final class View: HighlightTrackingButton {
public final class View: HighlightTrackingButton, ComponentTaggedView {
public func matches(tag: Any) -> Bool {
if let component = self.component, let componentTag = component.tag {
let tag = tag as AnyObject
if componentTag === tag {
return true
}
}
return false
}
private var component: PlainButtonComponent?
private weak var componentState: EmptyComponentState?
@@ -73,18 +95,25 @@ public final class PlainButtonComponent: Component {
self.highligthedChanged = { [weak self] highlighted in
if let self, self.bounds.width > 0.0 {
let animateAlpha = self.component?.animateAlpha ?? true
let topScale: CGFloat = (self.bounds.width - 8.0) / self.bounds.width
let maxScale: CGFloat = (self.bounds.width + 2.0) / self.bounds.width
if highlighted {
self.contentContainer.layer.removeAnimation(forKey: "opacity")
self.contentContainer.layer.removeAnimation(forKey: "sublayerTransform")
self.contentContainer.alpha = 0.7
if animateAlpha {
self.contentContainer.alpha = 0.7
}
let transition = Transition(animation: .curve(duration: 0.2, curve: .easeInOut))
transition.setScale(layer: self.contentContainer.layer, scale: topScale)
} else {
self.contentContainer.alpha = 1.0
self.contentContainer.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2)
if animateAlpha {
self.contentContainer.alpha = 1.0
self.contentContainer.layer.animateAlpha(from: 0.7, to: 1.0, duration: 0.2)
}
let transition = Transition(animation: .none)
transition.setScale(layer: self.contentContainer.layer, scale: 1.0)

View File

@@ -481,17 +481,46 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi
let signal = Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> { subscriber in
subscriber.putNext(.reset)
var filteredPath = resource.path
if filteredPath.hasPrefix("file://") {
filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
let filteredPaths = resource.paths.map { path in
if path.hasPrefix("file://") {
return path.replacingOccurrences(of: "file://", with: "")
} else {
return path
}
}
let filteredPath = filteredPaths.first ?? ""
let defaultPreset = TGMediaVideoConversionPreset(rawValue: UInt32(UserDefaults.standard.integer(forKey: "TG_preferredVideoPreset_v0")))
let qualityPreset = MediaQualityPreset(preset: defaultPreset)
let isImage = filteredPath.contains(".jpg")
var isStory = false
let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
let avAsset: AVAsset?
if isImage {
avAsset = nil
} else if filteredPaths.count > 1 {
let composition = AVMutableComposition()
var currentTime = CMTime.zero
for path in filteredPaths {
let asset = AVURLAsset(url: URL(fileURLWithPath: path))
let duration = asset.duration
do {
try composition.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: duration),
of: asset,
at: currentTime
)
currentTime = CMTimeAdd(currentTime, duration)
} catch {
}
}
avAsset = composition
} else {
avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
}
var adjustments: TGVideoEditAdjustments?
var mediaEditorValues: MediaEditorValues?
if let videoAdjustments = resource.adjustments {
@@ -500,26 +529,34 @@ public func fetchLocalFileVideoMediaResource(postbox: Postbox, resource: LocalFi
if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) {
mediaEditorValues = values
}
} else if let dict = legacy_unarchiveDeprecated(data: videoAdjustments.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
if alwaysUseModernPipeline && !isImage {
mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset)
} else {
adjustments = legacyAdjustments
} else {
if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: videoAdjustments.data.makeData()) {
mediaEditorValues = values
} else if let dict = legacy_unarchiveDeprecated(data: videoAdjustments.data.makeData()) as? [AnyHashable : Any], let legacyAdjustments = TGVideoEditAdjustments(dictionary: dict) {
if alwaysUseModernPipeline && !isImage {
mediaEditorValues = MediaEditorValues(legacyAdjustments: legacyAdjustments, defaultPreset: qualityPreset)
} else {
adjustments = legacyAdjustments
}
}
}
}
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let updatedSize = Atomic<Int64>(value: 0)
if let mediaEditorValues {
let duration: Double = avAsset.duration.seconds
let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
let duration: Double
let subject: MediaEditorVideoExport.Subject
if isImage, let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
duration = 5.0
subject = .image(image: image)
} else {
} else if let avAsset {
duration = avAsset.duration.seconds
subject = .video(asset: avAsset, isStory: isStory)
} else {
return EmptyDisposable
}
let configuration = recommendedVideoExportConfiguration(values: mediaEditorValues, duration: duration, frameRate: 30.0)
let videoExport = MediaEditorVideoExport(postbox: postbox, subject: subject, configuration: configuration, outputPath: tempFile.path)
videoExport.start()

View File

@@ -571,12 +571,12 @@ final class StoryItemSetContainerSendMessage {
let controller = component.controller() as? StoryContainerScreen
if let recordedAudioPreview = self.recordedAudioPreview {
if let recordedAudioPreview = self.recordedAudioPreview, case let .audio(audio) = recordedAudioPreview {
self.recordedAudioPreview = nil
let waveformBuffer = recordedAudioPreview.waveform.makeBitstream()
let waveformBuffer = audio.waveform.makeBitstream()
let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedAudioPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedAudioPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedAudioPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: nil, replyToMessageId: nil, replyToStoryId: focusedStoryId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let messages: [EnqueueMessage] = [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: audio.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: Int(audio.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: nil, replyToMessageId: nil, replyToStoryId: focusedStoryId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let _ = enqueueMessages(account: component.context.account, peerId: peerId, messages: messages).start()
@@ -939,7 +939,7 @@ final class StoryItemSetContainerSendMessage {
let resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
component.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
self.recordedAudioPreview = ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))
self.recordedAudioPreview = .audio(ChatRecordedMediaPreview.Audio(resource: resource, fileSize: Int32(data.compressedData.count), duration: Int32(data.duration), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)))
view.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
}
})

View File

@@ -0,0 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "VideoMessageCameraScreen",
module_name = "VideoMessageCameraScreen",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/Postbox",
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/ComponentFlow",
"//submodules/Components/ViewControllerComponent",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/TelegramPresentationData",
"//submodules/AccountContext",
"//submodules/AppBundle",
"//submodules/TelegramStringFormatting",
"//submodules/PresentationDataUtils",
"//submodules/MediaResources",
"//submodules/LocalMediaResources",
"//submodules/ImageCompression",
"//submodules/Camera",
"//submodules/Components/MultilineTextComponent",
"//submodules/Components/BlurredBackgroundComponent",
"//submodules/Components/BundleIconComponent:BundleIconComponent",
"//submodules/TelegramUI/Components/ButtonComponent",
"//submodules/TelegramUI/Components/PlainButtonComponent",
"//submodules/TelegramUI/Components/CameraButtonComponent",
"//submodules/TooltipUI",
"//submodules/TelegramNotices",
"//submodules/DeviceAccess",
"//submodules/TelegramUI/Components/MediaEditor",
],
visibility = [
"//visibility:public",
],
)

View File

@@ -0,0 +1,59 @@
import Foundation
import UIKit
import Display
private extension SimpleShapeLayer {
func animateStrokeStart(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeStart", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion)
}
func animateStrokeEnd(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeEnd", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion)
}
}
final class RecordingProgressView: UIView {
let shapeLayer = SimpleShapeLayer()
var value: CGFloat = 0.0 {
didSet {
if abs(self.shapeLayer.strokeEnd - self.value) >= 0.01 {
if abs(oldValue - self.value) < 0.1 {
let previousStrokeEnd = self.shapeLayer.strokeEnd
self.shapeLayer.strokeEnd = self.value
self.shapeLayer.animateStrokeEnd(from: previousStrokeEnd, to: self.shapeLayer.strokeEnd, duration: abs(previousStrokeEnd - self.value) * 60.0, timingFunction: CAMediaTimingFunctionName.linear.rawValue)
} else {
self.shapeLayer.strokeEnd = self.value
self.shapeLayer.removeAllAnimations()
}
}
}
}
override init(frame: CGRect) {
super.init(frame: frame)
self.shapeLayer.fillColor = UIColor.clear.cgColor
self.shapeLayer.strokeColor = UIColor(white: 1.0, alpha: 0.6).cgColor
self.shapeLayer.lineWidth = 4.0
self.shapeLayer.lineCap = .round
self.shapeLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0)
self.shapeLayer.strokeEnd = 0.0
self.layer.addSublayer(self.shapeLayer)
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func layoutSubviews() {
super.layoutSubviews()
if self.shapeLayer.frame != self.bounds {
self.shapeLayer.frame = self.bounds
self.shapeLayer.path = CGPath(ellipseIn: self.bounds.insetBy(dx: self.shapeLayer.lineWidth, dy: self.shapeLayer.lineWidth), transform: nil)
}
}
}
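A hypothetical usage sketch (not part of this commit), assuming a `containerView` and a periodic recording-duration callback, showing how the progress ring can be driven while recording:
// Sketch: the value setter animates strokeEnd for small changes and snaps for large jumps.
let progressView = RecordingProgressView(frame: CGRect(x: 0.0, y: 0.0, width: 120.0, height: 120.0))
containerView.addSubview(progressView)
func recordingDurationUpdated(_ duration: Double, maxDuration: Double) {
    progressView.value = CGFloat(max(0.0, min(1.0, duration / maxDuration)))
}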

View File

@@ -0,0 +1,119 @@
import Foundation
import UIKit
import AVFoundation
final class ResultPreviewView: UIView {
let composition: AVComposition
let player: AVPlayer
let playerLayer: AVPlayerLayer
var didPlayToEndTimeObserver: NSObjectProtocol?
var trimRange: Range<Double>? {
didSet {
if let trimRange = self.trimRange {
self.player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
} else {
self.player.currentItem?.forwardPlaybackEndTime = .invalid
}
}
}
init(composition: AVComposition) {
self.composition = composition
self.player = AVPlayer(playerItem: AVPlayerItem(asset: composition))
self.player.isMuted = true
self.playerLayer = AVPlayerLayer(player: self.player)
super.init(frame: .zero)
self.layer.addSublayer(self.playerLayer)
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: self.player.currentItem, queue: nil, using: { [weak self] notification in
guard let self else {
return
}
var start: Double = 0.0
if let trimRange = self.trimRange {
start = trimRange.lowerBound
}
self.player.pause()
self.seek(to: start, andPlay: true)
})
self.player.play()
}
required public init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
}
}
func updateTrimRange(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
if !apply {
self.player.pause()
} else {
self.trimRange = start..<end
}
let seekTo: Double
if updatedEnd && !apply {
seekTo = end
} else {
seekTo = start
}
self.seek(to: seekTo, andPlay: apply)
}
func play() {
self.player.play()
}
func pause() {
self.player.pause()
}
private var targetTimePosition: (CMTime, Bool)?
private var updatingTimePosition = false
func seek(to seconds: Double, andPlay play: Bool) {
let position = CMTime(seconds: seconds, preferredTimescale: CMTimeScale(1000.0))
self.targetTimePosition = (position, play)
if !self.updatingTimePosition {
self.updateVideoTimePosition()
}
}
private func updateVideoTimePosition() {
guard let (targetPosition, _) = self.targetTimePosition else {
return
}
self.updatingTimePosition = true
self.player.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
if let self {
if let (currentTargetPosition, play) = self.targetTimePosition, currentTargetPosition == targetPosition {
self.updatingTimePosition = false
self.targetTimePosition = nil
if play {
self.player.play()
}
} else {
self.updateVideoTimePosition()
}
}
})
}
override func layoutSubviews() {
self.playerLayer.frame = self.bounds
}
}
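A hypothetical usage sketch (not part of this commit), assuming `recordedComposition: AVComposition` and a `previewContainer` view, showing playback of the captured result and forwarding trim updates (for example from MediaScrubberComponent's trackTrimUpdated callback):
// Sketch: preview playback of the recorded segments with live trim updates.
let previewView = ResultPreviewView(composition: recordedComposition)
previewView.frame = previewContainer.bounds
previewContainer.addSubview(previewView)
// Called while a trim handle is dragged; `apply` is true once the gesture ends.
func trimUpdated(start: Double, end: Double, updatedEnd: Bool, apply: Bool) {
    previewView.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}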

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "switchcamera_30.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,188 @@
%PDF-1.7
1 0 obj
<< >>
endobj
2 0 obj
<< /Length 3 0 R >>
stream
/DeviceRGB CS
/DeviceRGB cs
q
1.000000 0.000000 -0.000000 1.000000 3.084961 5.584656 cm
0.000000 0.000000 0.000000 scn
9.490515 20.080341 m
9.422689 20.080362 l
8.999260 20.080563 8.679590 20.080715 8.371326 20.006708 c
8.099400 19.941423 7.839443 19.833746 7.600999 19.687628 c
7.330693 19.521984 7.104758 19.295835 6.805490 18.996283 c
6.757546 18.948309 l
5.959852 18.150616 l
5.772345 17.963108 5.716480 17.908821 5.660614 17.864992 c
5.468593 17.714350 5.238950 17.619228 4.996650 17.589970 c
4.926156 17.581457 4.848266 17.580341 4.583089 17.580341 c
4.484859 17.580357 l
3.725908 17.580544 3.223788 17.580666 2.792615 17.474993 c
1.466152 17.149897 0.430476 16.114222 0.105380 14.787757 c
-0.000294 14.356585 -0.000171 13.854465 0.000015 13.095512 c
0.000031 12.997284 l
0.000031 5.465342 l
0.000031 5.436520 l
0.000025 4.620880 0.000020 3.968216 0.043112 3.440796 c
0.087345 2.899416 0.180274 2.431705 0.399492 2.001467 c
0.750868 1.311853 1.311542 0.751179 2.001156 0.399803 c
2.431395 0.180586 2.899104 0.087656 3.440485 0.043423 c
3.967894 0.000332 4.620543 0.000336 5.436161 0.000341 c
5.436225 0.000341 l
5.465031 0.000341 l
18.365032 0.000341 l
18.393837 0.000341 l
18.393900 0.000341 l
19.209520 0.000336 19.862169 0.000332 20.389578 0.043423 c
20.930958 0.087656 21.398668 0.180586 21.828907 0.399803 c
22.518520 0.751179 23.079195 1.311853 23.430571 2.001467 c
23.649788 2.431705 23.742718 2.899416 23.786951 3.440796 c
23.830042 3.968204 23.830038 4.620852 23.830032 5.436470 c
23.830032 5.436536 l
23.830032 5.465342 l
23.830032 12.997283 l
23.830048 13.095503 l
23.830235 13.854461 23.830357 14.356583 23.724682 14.787757 c
23.399588 16.114222 22.363911 17.149897 21.037447 17.474993 c
20.606276 17.580666 20.104155 17.580544 19.345201 17.580357 c
19.246973 17.580341 l
18.981796 17.580341 18.903906 17.581457 18.833412 17.589970 c
18.591112 17.619228 18.361469 17.714350 18.169449 17.864992 c
18.113583 17.908821 18.057718 17.963108 17.870209 18.150616 c
17.072515 18.948311 l
17.024576 18.996279 l
16.725307 19.295834 16.499371 19.521982 16.229063 19.687628 c
15.990620 19.833746 15.730661 19.941423 15.458736 20.006708 c
15.150473 20.080715 14.830803 20.080563 14.407373 20.080362 c
14.339548 20.080341 l
9.490515 20.080341 l
h
8.681808 18.713455 m
8.817650 18.746067 8.969681 18.750341 9.490515 18.750341 c
14.339548 18.750341 l
14.860382 18.750341 15.012413 18.746067 15.148252 18.713455 c
15.284472 18.680752 15.414694 18.626812 15.534140 18.553616 c
15.653254 18.480623 15.763777 18.376143 16.132063 18.007858 c
16.929756 17.210163 l
16.954220 17.185692 l
16.954237 17.185675 l
17.106804 17.033035 17.221756 16.918030 17.348524 16.818577 c
17.731848 16.517857 18.190273 16.327971 18.673967 16.269562 c
18.833941 16.250244 18.996555 16.250284 19.212389 16.250336 c
19.246973 16.250341 l
20.139725 16.250341 20.466656 16.245523 20.720854 16.183224 c
21.565954 15.976102 22.225792 15.316265 22.432913 14.471165 c
22.495213 14.216966 22.500031 13.890034 22.500031 12.997283 c
22.500031 5.465342 l
22.500031 4.614289 22.499514 4.015995 22.461367 3.549101 c
22.423855 3.089968 22.353294 2.816771 22.245531 2.605274 c
22.021667 2.165916 21.664457 1.808706 21.225100 1.584843 c
21.013603 1.477079 20.740406 1.406519 20.281273 1.369007 c
19.814379 1.330860 19.216084 1.330343 18.365032 1.330343 c
5.465031 1.330343 l
4.613979 1.330343 4.015684 1.330860 3.548789 1.369007 c
3.089657 1.406519 2.816460 1.477079 2.604963 1.584843 c
2.165605 1.808706 1.808395 2.165916 1.584531 2.605274 c
1.476768 2.816771 1.406208 3.089968 1.368695 3.549101 c
1.330548 4.015995 1.330031 4.614290 1.330031 5.465342 c
1.330031 12.997284 l
1.330031 13.890034 1.334849 14.216966 1.397150 14.471165 c
1.604271 15.316265 2.264108 15.976102 3.109208 16.183224 c
3.363407 16.245523 3.690338 16.250341 4.583089 16.250341 c
4.617674 16.250336 l
4.617689 16.250336 l
4.833515 16.250284 4.996125 16.250244 5.156096 16.269562 c
5.639788 16.327971 6.098214 16.517857 6.481537 16.818577 c
6.608315 16.918037 6.723272 17.033049 6.875851 17.185703 c
6.900304 17.210163 l
7.697999 18.007858 l
8.066284 18.376143 8.176808 18.480623 8.295923 18.553616 c
8.415368 18.626812 8.545589 18.680752 8.681808 18.713455 c
h
8.819138 12.449797 m
9.606685 13.253181 10.702194 13.750379 11.915030 13.750379 c
13.964883 13.750379 15.683014 12.327185 16.134258 10.415339 c
14.848875 10.415339 l
14.636918 10.415339 14.521129 10.168130 14.656816 10.005297 c
16.622980 7.645809 l
16.722927 7.525869 16.907139 7.525866 17.007090 7.645802 c
18.973408 10.005290 l
19.109104 10.168120 18.993317 10.415339 18.781355 10.415339 c
17.491955 10.415339 l
17.019377 13.067384 14.702655 15.080379 11.915030 15.080379 c
10.330412 15.080379 8.896755 14.428887 7.869370 13.380838 c
7.612269 13.118567 7.616461 12.697534 7.878733 12.440434 c
8.141004 12.183333 8.562037 12.187525 8.819138 12.449797 c
h
6.338119 8.415344 m
5.048842 8.415344 l
4.836884 8.415344 4.721095 8.662557 4.856786 8.825389 c
6.822981 11.184870 l
6.922931 11.304811 7.107148 11.304810 7.207097 11.184867 c
9.173254 8.825387 l
9.308942 8.662554 9.193151 8.415344 8.981194 8.415344 c
7.695821 8.415344 l
8.147092 6.503536 9.865205 5.080379 11.915030 5.080379 c
13.096758 5.080379 14.166923 5.552347 14.949532 6.319569 c
15.211796 6.576675 15.632830 6.572495 15.889937 6.310231 c
16.147045 6.047967 16.142864 5.626933 15.880600 5.369825 c
14.859452 4.368757 13.458792 3.750378 11.915030 3.750378 c
9.127432 3.750378 6.810725 5.763336 6.338119 8.415344 c
h
f*
n
Q
endstream
endobj
3 0 obj
5480
endobj
4 0 obj
<< /Annots []
/Type /Page
/MediaBox [ 0.000000 0.000000 30.000000 30.000000 ]
/Resources 1 0 R
/Contents 2 0 R
/Parent 5 0 R
>>
endobj
5 0 obj
<< /Kids [ 4 0 R ]
/Count 1
/Type /Pages
>>
endobj
6 0 obj
<< /Pages 5 0 R
/Type /Catalog
>>
endobj
xref
0 7
0000000000 65535 f
0000000010 00000 n
0000000034 00000 n
0000005570 00000 n
0000005593 00000 n
0000005766 00000 n
0000005840 00000 n
trailer
<< /ID [ (some) (id) ]
/Root 6 0 R
/Size 7
>>
startxref
5899
%%EOF

View File

@@ -29,6 +29,7 @@ import AuthorizationUI
import ChatListUI
import StoryContainerScreen
import ChatMessageNotificationItem
import PhoneNumberFormat
final class UnauthorizedApplicationContext {
let sharedContext: SharedAccountContextImpl
@@ -725,7 +726,7 @@ final class AuthorizedApplicationContext {
})
let importableContacts = self.context.sharedContext.contactDataManager?.importable() ?? .single([:])
self.context.account.importableContacts.set(self.context.account.postbox.preferencesView(keys: [PreferencesKeys.contactsSettings])
let optionalImportableContacts = self.context.account.postbox.preferencesView(keys: [PreferencesKeys.contactsSettings])
|> mapToSignal { preferences -> Signal<[DeviceContactNormalizedPhoneNumber: ImportableDeviceContactData], NoError> in
let settings: ContactsSettings = preferences.values[PreferencesKeys.contactsSettings]?.get(ContactsSettings.self) ?? .defaultSettings
if settings.synchronizeContacts {
@@ -733,6 +734,11 @@ final class AuthorizedApplicationContext {
} else {
return .single([:])
}
}
self.context.account.importableContacts.set(optionalImportableContacts)
self.context.sharedContext.deviceContactPhoneNumbers.set(optionalImportableContacts
|> map { contacts in
return Set(contacts.keys.map { cleanPhoneNumber($0.rawValue) })
})
let previousTheme = Atomic<PresentationTheme?>(value: nil)

View File

@@ -120,6 +120,7 @@ import PeerInfoScreen
import MediaEditorScreen
import WallpaperGalleryScreen
import WallpaperGridScreen
import VideoMessageCameraScreen
public enum ChatControllerPeekActions {
case standard
@@ -347,8 +348,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
var audioRecorderDisposable: Disposable?
var audioRecorderStatusDisposable: Disposable?
var videoRecorderValue: InstantVideoController?
var videoRecorder = Promise<InstantVideoController?>()
var videoRecorderValue: VideoMessageCameraScreen?
var videoRecorder = Promise<VideoMessageCameraScreen?>()
var videoRecorderDisposable: Disposable?
var buttonKeyboardMessageDisposable: Disposable?
@@ -6295,7 +6296,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
$0.updatedInputTextPanelState { panelState in
if let videoRecorder = videoRecorder {
if panelState.mediaRecordingState == nil {
return panelState.withUpdatedMediaRecordingState(.video(status: .recording(videoRecorder.audioStatus), isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId))
let recordingStatus = videoRecorder.recordingStatus
return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId))
}
} else {
return panelState.withUpdatedMediaRecordingState(nil)
@@ -6325,13 +6327,13 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.present(videoRecorder, in: .window(.root))
if strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId {
videoRecorder.lockVideo()
videoRecorder.lockVideoRecording()
}
}
strongSelf.updateDownButtonVisibility()
if let previousVideoRecorderValue = previousVideoRecorderValue {
previousVideoRecorderValue.dismissVideo()
previousVideoRecorderValue.discardVideo()
}
}
}
@@ -15475,68 +15477,59 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
isScheduledMessages = true
}
self.videoRecorder.set(.single(legacyInstantVideoController(theme: self.presentationData.theme, forStory: false, panelFrame: self.view.convert(currentInputPanelFrame, to: nil), context: self.context, peerId: peerId, slowmodeState: !isScheduledMessages ? self.presentationInterfaceState.slowmodeState : nil, hasSchedule: !isScheduledMessages && peerId.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
if let strongSelf = self {
guard let message = message else {
strongSelf.videoRecorder.set(.single(nil))
let _ = peerId
let _ = isScheduledMessages
let controller = VideoMessageCameraScreen(
context: self.context,
updatedPresentationData: self.updatedPresentationData,
inputPanelFrame: currentInputPanelFrame,
completion: { [weak self] message in
guard let self, let videoController = self.videoRecorderValue else {
return
}
let replyMessageSubject = strongSelf.presentationInterfaceState.interfaceState.replyMessageSubject
let replyMessageSubject = self.presentationInterfaceState.interfaceState.replyMessageSubject
let correlationId = Int64.random(in: 0 ..< Int64.max)
let updatedMessage = message
let message = message
.withUpdatedReplyToMessageId(replyMessageSubject?.subjectModel)
.withUpdatedCorrelationId(correlationId)
.withUpdatedAttributes({ attributes in
var attributes = attributes
#if DEBUG
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
#endif
return attributes
})
var usedCorrelationId = false
if strongSelf.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
if self.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
usedCorrelationId = true
strongSelf.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController] in
self.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNodeImpl.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController, weak self] in
videoController?.hideVideoSnapshot()
guard let strongSelf = self else {
guard let self else {
return
}
strongSelf.videoRecorder.set(.single(nil))
self.videoRecorder.set(.single(nil))
})
} else {
strongSelf.videoRecorder.set(.single(nil))
self.videoRecorder.set(.single(nil))
}
strongSelf.chatDisplayNode.setupSendActionOnViewUpdate({
if let strongSelf = self {
strongSelf.chatDisplayNode.collapseInput()
self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
if let self {
self.chatDisplayNode.collapseInput()
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, {
$0.updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil) }
self.updateChatPresentationInterfaceState(animated: true, interactive: false, {
$0.updatedRecordedMediaPreview(nil).updatedInterfaceState { $0.withUpdatedReplyMessageSubject(nil) }
})
}
}, usedCorrelationId ? correlationId : nil)
strongSelf.sendMessages([updatedMessage])
self.sendMessages([message])
}
}, displaySlowmodeTooltip: { [weak self] view, rect in
self?.interfaceInteraction?.displaySlowmodeTooltip(view, rect)
}, presentSchedulePicker: { [weak self] done in
if let strongSelf = self {
strongSelf.presentScheduleTimePicker(completion: { [weak self] time in
if let strongSelf = self {
done(time)
if strongSelf.presentationInterfaceState.subject != .scheduledMessages && time != scheduleWhenOnlineTimestamp {
strongSelf.openScheduledMessages()
}
}
})
)
controller.onResume = { [weak self] in
guard let self else {
return
}
})))
self.resumeMediaRecorder()
}
self.videoRecorder.set(.single(controller))
}
}
}
@@ -15590,7 +15583,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
strongSelf.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(ChatRecordedMediaPreview(resource: resource, duration: Int32(data.duration), fileSize: Int32(data.compressedData.count), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5))).updatedInputTextPanelState { panelState in
$0.updatedRecordedMediaPreview(.audio(ChatRecordedMediaPreview.Audio(resource: resource, fileSize: Int32(data.compressedData.count), duration: Int32(data.duration), waveform: AudioWaveform(bitstream: waveform, bitsPerSample: 5)))).updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
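The recorded voice waveform above is packed via AudioWaveform(bitstream:bitsPerSample:) at 5 bits per amplitude sample. As an aside, a minimal sketch of that general bit-packing idea — not the actual AudioWaveform implementation; the function name, sample range and bit order below are assumptions:

    // Packs amplitude samples clamped to 0...31 into a 5-bit-per-sample bitstream,
    // LSB-first within each byte. Illustrative only, not Telegram's encoder.
    func packWaveform(samples: [UInt8], bitsPerSample: Int = 5) -> Data {
        var data = Data(count: (samples.count * bitsPerSample + 7) / 8)
        for (index, sample) in samples.enumerated() {
            let clamped = UInt16(min(Int(sample), (1 << bitsPerSample) - 1))
            let bitOffset = index * bitsPerSample
            let byteIndex = bitOffset / 8
            let shift = bitOffset % 8
            data[byteIndex] |= UInt8(truncatingIfNeeded: clamped << shift)
            if shift + bitsPerSample > 8 {
                data[byteIndex + 1] |= UInt8(truncatingIfNeeded: clamped >> (8 - shift))
            }
        }
        return data
    }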
@@ -15657,18 +15650,59 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
} else if let videoRecorderValue = self.videoRecorderValue {
if case .send = updatedAction {
self.chatDisplayNode.updateRecordedMediaDeleted(false)
videoRecorderValue.completeVideo()
videoRecorderValue.sendVideoRecording()
} else {
if case .dismiss = updatedAction {
self.chatDisplayNode.updateRecordedMediaDeleted(true)
}
if case .preview = updatedAction, videoRecorderValue.stopVideo() {
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})
} else {
switch updatedAction {
case .preview, .pause:
if videoRecorderValue.stopVideoRecording() {
let _ = (videoRecorderValue.takenRecordedData()
|> deliverOnMainQueue).startStandalone(next: { [weak self] data in
if let strongSelf = self, let data = data {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
strongSelf.recorderFeedback = nil
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
} else {
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(.video(
ChatRecordedMediaPreview.Video(
duration: Int32(data.duration),
frames: data.frames,
framesUpdateTimestamp: data.framesUpdateTimestamp,
trimRange: data.trimRange,
control: ChatRecordedMediaPreview.Video.Control(
updateTrimRange: { [weak self] start, end, updatedEnd, apply in
if let self, let videoRecorderValue = self.videoRecorderValue {
videoRecorderValue.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
}
}
)
)
)).updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
strongSelf.recorderFeedback = nil
strongSelf.updateDownButtonVisibility()
}
}
})
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})
}
default:
self.videoRecorder.set(.single(nil))
}
}
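These hunks replace the audio-only recordedMediaPreview value with an enum that also carries a video case, built from takenRecordedData() above and consumed by the recording preview panel further down. The real declaration lives elsewhere in the commit; the following is only a shape inferred from how the cases are constructed and read here, and the field types are assumptions:

    // Inferred shape, not the actual declaration. The `frames` and `trimRange`
    // types are guesses based on how MediaScrubberComponent consumes them below.
    enum ChatRecordedMediaPreview {
        struct Audio {
            let resource: TelegramMediaResource
            let fileSize: Int32
            let duration: Int32
            let waveform: AudioWaveform
        }

        struct Video {
            struct Control {
                // Mirrors updateTrimRange(start:end:updatedEnd:apply:) on the camera screen.
                let updateTrimRange: (Double, Double, Bool, Bool) -> Void
            }

            let duration: Int32
            let frames: [UIImage]
            let framesUpdateTimestamp: Double
            let trimRange: Range<Double>?
            let control: Control
        }

        case audio(Audio)
        case video(Video)
    }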
@@ -15683,13 +15717,9 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
audioRecorderValue.stop()
self.audioRecorder.set(.single(nil))
}
} else if let videoRecorderValue = self.videoRecorderValue {
if videoRecorderValue.stopVideo() {
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})
} else if let _ = self.videoRecorderValue {
if let _ = self.presentationInterfaceState.inputTextPanelState.mediaRecordingState {
self.dismissMediaRecorder(pause ? .pause : .preview)
} else {
self.videoRecorder.set(.single(nil))
}
@@ -15705,6 +15735,13 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorderValue, isLocked: true))
}.updatedRecordedMediaPreview(nil)
})
} else if let videoRecorderValue = self.videoRecorderValue {
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
let recordingStatus = videoRecorderValue.recordingStatus
return panelState.withUpdatedMediaRecordingState(.video(status: .recording(InstantVideoControllerRecordingStatus(micLevel: recordingStatus.micLevel, duration: recordingStatus.duration)), isLocked: true))
}.updatedRecordedMediaPreview(nil)
})
}
}
@@ -15717,10 +15754,16 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
})
}
self.videoRecorderValue?.lockVideo()
self.videoRecorderValue?.lockVideoRecording()
}
func deleteMediaRecording() {
if let _ = self.audioRecorderValue {
self.audioRecorder.set(.single(nil))
} else if let _ = self.videoRecorderValue {
self.videoRecorder.set(.single(nil))
}
self.chatDisplayNode.updateRecordedMediaDeleted(true)
self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedRecordedMediaPreview(nil)
@@ -15731,7 +15774,12 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
func sendMediaRecording(silentPosting: Bool? = nil, scheduleTime: Int32? = nil, viewOnce: Bool = false) {
self.chatDisplayNode.updateRecordedMediaDeleted(false)
if let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview {
guard let recordedMediaPreview = self.presentationInterfaceState.recordedMediaPreview else {
return
}
switch recordedMediaPreview {
case let .audio(audio):
var isScheduledMessages = false
if case .scheduledMessages = self.presentationInterfaceState.subject {
isScheduledMessages = true
@@ -15744,7 +15792,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return
}
let waveformBuffer = recordedMediaPreview.waveform.makeBitstream()
let waveformBuffer = audio.waveform.makeBitstream()
self.chatDisplayNode.setupSendActionOnViewUpdate({ [weak self] in
if let strongSelf = self {
@@ -15763,7 +15811,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
attributes.append(AutoremoveTimeoutMessageAttribute(timeout: viewOnceTimeout, countdownBeginTime: nil))
}
let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: recordedMediaPreview.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(recordedMediaPreview.fileSize), attributes: [.Audio(isVoice: true, duration: Int(recordedMediaPreview.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let messages: [EnqueueMessage] = [.message(text: "", attributes: attributes, inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: Int64.random(in: Int64.min ... Int64.max)), partialReference: nil, resource: audio.resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(audio.fileSize), attributes: [.Audio(isVoice: true, duration: Int(audio.duration), title: nil, performer: nil, waveform: waveformBuffer)])), threadId: self.chatLocation.threadId, replyToMessageId: self.presentationInterfaceState.interfaceState.replyMessageSubject?.subjectModel, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])]
let transformedMessages: [EnqueueMessage]
if let silentPosting = silentPosting {
@@ -15786,6 +15834,8 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
})
donateSendMessageIntent(account: self.context.account, sharedContext: self.context.sharedContext, intentContext: .chat, peerIds: [peerId])
case .video:
self.videoRecorderValue?.sendVideoRecording()
}
}

View File

@@ -331,7 +331,8 @@ private func extractAssociatedData(
maxReadStoryId: Int32?,
recommendedChannels: RecommendedChannels?,
audioTranscriptionTrial: AudioTranscription.TrialState,
chatThemes: [TelegramTheme]
chatThemes: [TelegramTheme],
deviceContactsNumbers: Set<String>
) -> ChatMessageItemAssociatedData {
var automaticDownloadPeerId: EnginePeer.Id?
var automaticMediaDownloadPeerType: MediaAutoDownloadPeerType = .channel
@@ -386,7 +387,7 @@ private func extractAssociatedData(
automaticDownloadPeerId = message.peerId
}
return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes)
return ChatMessageItemAssociatedData(automaticDownloadPeerType: automaticMediaDownloadPeerType, automaticDownloadPeerId: automaticDownloadPeerId, automaticDownloadNetworkType: automaticDownloadNetworkType, isRecentActions: false, subject: subject, contactsPeerIds: contactsPeerIds, channelDiscussionGroup: channelDiscussionGroup, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, currentlyPlayingMessageId: currentlyPlayingMessageId, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, accountPeer: accountPeer, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, topicAuthorId: topicAuthorId, hasBots: hasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes, deviceContactsNumbers: deviceContactsNumbers)
}
private extension ChatHistoryLocationInput {
@@ -1392,6 +1393,9 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
let chatThemes = self.context.engine.themes.getChatThemes(accountManager: self.context.sharedContext.accountManager)
let deviceContactsNumbers = self.context.sharedContext.deviceContactPhoneNumbers.get()
|> distinctUntilChanged
let messageViewQueue = Queue.mainQueue()
let historyViewTransitionDisposable = combineLatest(queue: messageViewQueue,
historyViewUpdate,
@@ -1413,8 +1417,9 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
maxReadStoryId,
recommendedChannels,
audioTranscriptionTrial,
chatThemes
).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels, audioTranscriptionTrial, chatThemes in
chatThemes,
deviceContactsNumbers
).startStrict(next: { [weak self] update, chatPresentationData, selectedMessages, updatingMedia, networkType, animatedEmojiStickers, additionalAnimatedEmojiStickers, customChannelDiscussionReadState, customThreadOutgoingReadState, availableReactions, defaultReaction, accountPeer, suggestAudioTranscription, promises, topicAuthorId, translationState, maxReadStoryId, recommendedChannels, audioTranscriptionTrial, chatThemes, deviceContactsNumbers in
let (historyAppearsCleared, pendingUnpinnedAllMessages, pendingRemovedMessages, currentlyPlayingMessageIdAndType, scrollToMessageId, chatHasBots, allAdMessages) = promises
func applyHole() {
@@ -1573,7 +1578,7 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
translateToLanguage = languageCode
}
let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes)
let associatedData = extractAssociatedData(chatLocation: chatLocation, view: view, automaticDownloadNetworkType: networkType, animatedEmojiStickers: animatedEmojiStickers, additionalAnimatedEmojiStickers: additionalAnimatedEmojiStickers, subject: subject, currentlyPlayingMessageId: currentlyPlayingMessageIdAndType?.0, isCopyProtectionEnabled: isCopyProtectionEnabled, availableReactions: availableReactions, defaultReaction: defaultReaction, isPremium: isPremium, alwaysDisplayTranscribeButton: alwaysDisplayTranscribeButton, accountPeer: accountPeer, topicAuthorId: topicAuthorId, hasBots: chatHasBots, translateToLanguage: translateToLanguage, maxReadStoryId: maxReadStoryId, recommendedChannels: recommendedChannels, audioTranscriptionTrial: audioTranscriptionTrial, chatThemes: chatThemes, deviceContactsNumbers: deviceContactsNumbers)
let filteredEntries = chatHistoryEntriesForView(
location: chatLocation,
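The history node now feeds the shared deviceContactPhoneNumbers set into ChatMessageItemAssociatedData as deviceContactsNumbers. No consumer of the set appears in this excerpt, so the following is a purely hypothetical illustration of the kind of lookup a message item could perform with it; the helper name and the digit-only normalization are assumptions, not code from the commit:

    // Hypothetical: checks whether a phone number shared in a message is already
    // among the device's contacts, ignoring formatting differences.
    func isAlreadyDeviceContact(_ phoneNumber: String, associatedData: ChatMessageItemAssociatedData) -> Bool {
        let normalized = phoneNumber.filter { $0.isNumber }
        return associatedData.deviceContactsNumbers.contains { stored in
            stored.filter { $0.isNumber } == normalized
        }
    }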

View File

@@ -230,12 +230,9 @@ final class ChatViewOnceMessageContextExtractedContentSource: ContextExtractedCo
})
if let messageNode = node as? ChatMessageItemView, let copyContentNode = messageNode.getMessageContextSourceNode(stableId: self.message.stableId) {
let delta: CGFloat = 0.0// (width - 20.0 - messageNode.frame.height)
self.initialAppearanceOffset = CGPoint(x: 0.0, y: -delta)
self.initialAppearanceOffset = CGPoint(x: 0.0, y: width - 20.0 - copyContentNode.frame.height)
copyContentNode.contentNode.backgroundColor = UIColor.red.withAlphaComponent(0.5)
messageNode.frame.origin.y = sourceRect.origin.y// chatNode.frame.height - sourceRect.origin.y - sourceRect.size.height
messageNode.frame.origin.y = sourceRect.origin.y
chatNode.addSubnode(messageNode)
result = ContextControllerTakeViewInfo(containingItem: .node(copyContentNode), contentAreaInScreenSpace: chatNode.convert(chatNode.frameForVisibleArea(), to: nil))

View File

@@ -17,6 +17,8 @@ import AudioWaveformNode
import ChatInputPanelNode
import TooltipUI
import TelegramNotices
import ComponentFlow
import MediaScrubberComponent
extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {
}
@@ -67,13 +69,15 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
private let waveformButton: ASButtonNode
let waveformBackgroundNode: ASImageNode
private var viewOnce = false
let scrubber = ComponentView<Empty>()
var viewOnce = false
let viewOnceButton: ChatRecordingViewOnceButtonNode
let recordMoreButton: ChatRecordingViewOnceButtonNode
private let waveformNode: AudioWaveformNode
private let waveformForegroundNode: AudioWaveformNode
let waveformScubberNode: MediaPlayerScrubbingNode
let waveformScrubberNode: MediaPlayerScrubbingNode
private var presentationInterfaceState: ChatPresentationInterfaceState?
@@ -129,7 +133,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.waveformForegroundNode = AudioWaveformNode()
self.waveformForegroundNode.isLayerBacked = true
self.waveformScubberNode = MediaPlayerScrubbingNode(content: .custom(backgroundNode: self.waveformNode, foregroundContentNode: self.waveformForegroundNode))
self.waveformScrubberNode = MediaPlayerScrubbingNode(content: .custom(backgroundNode: self.waveformNode, foregroundContentNode: self.waveformForegroundNode))
self.durationLabel = MediaPlayerTimeTextNode(textColor: theme.chat.inputPanel.actionControlForegroundColor)
self.durationLabel.alignment = .right
@@ -156,7 +160,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.deleteButton.addSubnode(self.binNode)
self.addSubnode(self.waveformBackgroundNode)
self.addSubnode(self.sendButton)
self.addSubnode(self.waveformScubberNode)
self.addSubnode(self.waveformScrubberNode)
self.addSubnode(self.playButton)
self.addSubnode(self.durationLabel)
self.addSubnode(self.waveformButton)
@@ -202,6 +206,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
viewForOverlayContent.addSubnode(self.viewOnceButton)
viewForOverlayContent.addSubnode(self.recordMoreButton)
}
self.view.disablesInteractiveTransitionGestureRecognizer = true
}
private func maybePresentViewOnceTooltip() {
@@ -242,35 +248,97 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
self.presentationInterfaceState = interfaceState
if let recordedMediaPreview = interfaceState.recordedMediaPreview, updateWaveform {
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: recordedMediaPreview.waveform)
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: recordedMediaPreview.waveform)
if self.mediaPlayer != nil {
self.mediaPlayer?.pause()
}
if let context = self.context {
let mediaManager = context.sharedContext.mediaManager
let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: recordedMediaPreview.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true)
mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in
mediaPlayer?.seek(timestamp: 0.0)
if let recordedMediaPreview = interfaceState.recordedMediaPreview, let context = self.context {
switch recordedMediaPreview {
case let .audio(audio):
self.waveformButton.isHidden = false
self.waveformBackgroundNode.isHidden = false
self.waveformForegroundNode.isHidden = false
self.waveformScrubberNode.isHidden = false
self.playButton.isHidden = false
self.durationLabel.isHidden = false
if let view = self.scrubber.view, view.superview != nil {
view.removeFromSuperview()
}
self.mediaPlayer = mediaPlayer
self.durationLabel.defaultDuration = Double(recordedMediaPreview.duration)
self.durationLabel.status = mediaPlayer.status
self.waveformScubberNode.status = mediaPlayer.status
self.statusDisposable.set((mediaPlayer.status
if updateWaveform {
self.waveformNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor.withAlphaComponent(0.5), gravity: .center, waveform: audio.waveform)
self.waveformForegroundNode.setup(color: interfaceState.theme.chat.inputPanel.actionControlForegroundColor, gravity: .center, waveform: audio.waveform)
if self.mediaPlayer != nil {
self.mediaPlayer?.pause()
}
let mediaManager = context.sharedContext.mediaManager
let mediaPlayer = MediaPlayer(audioSessionManager: mediaManager.audioSession, postbox: context.account.postbox, userLocation: .other, userContentType: .audio, resourceReference: .standalone(resource: audio.resource), streamable: .none, video: false, preferSoftwareDecoding: false, enableSound: true, fetchAutomatically: true)
mediaPlayer.actionAtEnd = .action { [weak mediaPlayer] in
mediaPlayer?.seek(timestamp: 0.0)
}
self.mediaPlayer = mediaPlayer
self.durationLabel.defaultDuration = Double(audio.duration)
self.durationLabel.status = mediaPlayer.status
self.waveformScrubberNode.status = mediaPlayer.status
self.statusDisposable.set((mediaPlayer.status
|> deliverOnMainQueue).startStrict(next: { [weak self] status in
if let strongSelf = self {
switch status.status {
if let strongSelf = self {
switch status.status {
case .playing, .buffering(_, true, _, _):
strongSelf.playPauseIconNode.enqueueState(.pause, animated: true)
default:
strongSelf.playPauseIconNode.enqueueState(.play, animated: true)
}
}
}))
}
case let .video(video):
self.waveformButton.isHidden = true
self.waveformBackgroundNode.isHidden = true
self.waveformForegroundNode.isHidden = true
self.waveformScrubberNode.isHidden = true
self.playButton.isHidden = true
self.durationLabel.isHidden = true
let scrubberSize = self.scrubber.update(
transition: .immediate,
component: AnyComponent(
MediaScrubberComponent(
context: context,
style: .videoMessage,
generationTimestamp: 0,
position: 0,
minDuration: 1.0,
maxDuration: 60.0,
isPlaying: false,
tracks: [
MediaScrubberComponent.Track(
id: 0,
content: .video(frames: video.frames, framesUpdateTimestamp: video.framesUpdateTimestamp),
duration: Double(video.duration),
trimRange: video.trimRange,
offset: nil,
isMain: true
)
],
positionUpdated: { _, _ in },
trackTrimUpdated: { _, start, end, updatedEnd, apply in
video.control.updateTrimRange(start, end, updatedEnd, apply)
},
trackOffsetUpdated: { _, _, _ in },
trackLongPressed: { _, _ in }
)
),
environment: {},
forceUpdate: false,
containerSize: CGSize(width: width - leftInset - rightInset - 45.0 * 2.0, height: 33.0)
)
if let view = self.scrubber.view {
if view.superview == nil {
self.view.addSubview(view)
}
}))
view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize)
}
}
}
}
@@ -327,7 +395,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
let waveformBackgroundFrame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: CGSize(width: width - leftInset - rightInset - 90.0, height: 33.0))
transition.updateFrame(node: self.waveformBackgroundNode, frame: waveformBackgroundFrame)
transition.updateFrame(node: self.waveformButton, frame: CGRect(origin: CGPoint(x: leftInset + 45.0, y: 0.0), size: CGSize(width: width - leftInset - rightInset - 90.0, height: panelHeight)))
transition.updateFrame(node: self.waveformScubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0)))
transition.updateFrame(node: self.waveformScrubberNode, frame: CGRect(origin: CGPoint(x: leftInset + 45.0 + 35.0, y: 7.0 + floor((33.0 - 13.0) / 2.0)), size: CGSize(width: width - leftInset - rightInset - 90.0 - 45.0 - 40.0, height: 13.0)))
transition.updateFrame(node: self.durationLabel, frame: CGRect(origin: CGPoint(x: width - rightInset - 90.0 - 4.0, y: 15.0), size: CGSize(width: 35.0, height: 20.0)))
prevInputPanelNode?.frame = CGRect(origin: .zero, size: CGSize(width: width, height: panelHeight))
@@ -369,6 +437,11 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
prevTextInputPanelNode.actionButtons.micButton.animateOut(true)
if let view = self.scrubber.view {
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
view.layer.animatePosition(from: CGPoint(x: 0.0, y: 64.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
self.deleteButton.layer.animateScale(from: 0.3, to: 1.0, duration: 0.15)
self.deleteButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
@@ -377,8 +450,8 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
self.durationLabel.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, delay: 0.1)
self.waveformScubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.3, delay: 0.1)
self.waveformScubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
self.waveformScrubberNode.layer.animateScaleY(from: 0.1, to: 1.0, duration: 0.3, delay: 0.1)
self.waveformScrubberNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
self.waveformBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
self.waveformBackgroundNode.layer.animateFrame(
@@ -412,6 +485,7 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
}
@objc func sendPressed() {
self.viewOnce = false
self.tooltipController?.dismiss()
self.interfaceInteraction?.sendRecordedMedia(false, self.viewOnce)

View File

@@ -47,7 +47,7 @@ final class ChatTextInputActionButtonsNode: ASDisplayNode {
let strings = presentationInterfaceState.strings
self.strings = strings
self.micButton = ChatTextInputMediaRecordingButton(context: context, theme: theme, strings: strings, presentController: presentController)
self.micButton = ChatTextInputMediaRecordingButton(context: context, theme: theme, pause: true, strings: strings, presentController: presentController)
self.sendContainerNode = ASDisplayNode()
self.sendContainerNode.layer.allowsGroupOpacity = true

View File

@@ -2054,13 +2054,14 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
self.actionButtons.micButton.audioRecorder = recorder
audioRecordingTimeNode.audioRecorder = recorder
case let .video(status, _):
let hadVideoRecorder = self.actionButtons.micButton.videoRecordingStatus != nil
if !hadVideoRecorder, isLocked {
self.actionButtons.micButton.lock()
}
switch status {
case let .recording(recordingStatus):
audioRecordingTimeNode.videoRecordingStatus = recordingStatus
self.actionButtons.micButton.videoRecordingStatus = recordingStatus
if isLocked {
audioRecordingCancelIndicator.layer.animateAlpha(from: audioRecordingCancelIndicator.alpha, to: 0, duration: 0.15, delay: 0, removeOnCompletion: false)
}
case .editing:
audioRecordingTimeNode.videoRecordingStatus = nil
self.actionButtons.micButton.videoRecordingStatus = nil
@@ -2561,46 +2562,64 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
if let prevPreviewInputPanelNode = self.prevInputPanelNode as? ChatRecordingPreviewInputPanelNode {
self.prevInputPanelNode = nil
if prevPreviewInputPanelNode.viewOnceButton.alpha > 0.0 {
if let snapshotView = prevPreviewInputPanelNode.viewOnceButton.view.snapshotContentTree() {
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshotView.removeFromSuperview()
})
snapshotView.layer.animateScale(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.viewForOverlayContent?.addSubview(snapshotView)
if !prevPreviewInputPanelNode.viewOnceButton.isHidden {
self.viewOnce = prevPreviewInputPanelNode.viewOnce
self.viewOnceButton.update(isSelected: prevPreviewInputPanelNode.viewOnce, animated: false)
self.viewOnceButton.layer.animatePosition(from: prevPreviewInputPanelNode.viewOnceButton.position, to: self.viewOnceButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
})
}
let animateOutPreviewButton: (ASDisplayNode) -> Void = { button in
if button.alpha > 0.0 {
if let snapshotView = button.view.snapshotContentTree() {
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
snapshotView.removeFromSuperview()
})
snapshotView.layer.animateScale(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
self.viewForOverlayContent?.addSubview(snapshotView)
}
}
}
animateOutPreviewButton(prevPreviewInputPanelNode.viewOnceButton)
animateOutPreviewButton(prevPreviewInputPanelNode.recordMoreButton)
prevPreviewInputPanelNode.gestureRecognizer?.isEnabled = false
prevPreviewInputPanelNode.isUserInteractionEnabled = false
if self.isMediaDeleted {
func animatePosition(for previewSubnode: ASDisplayNode) {
previewSubnode.layer.animatePosition(
from: previewSubnode.position,
to: CGPoint(x: leftMenuInset.isZero ? previewSubnode.position.x - 20 : leftMenuInset + previewSubnode.frame.width / 2.0, y: previewSubnode.position.y),
func animatePosition(for previewLayer: CALayer) {
previewLayer.animatePosition(
from: previewLayer.position,
to: CGPoint(x: leftMenuInset.isZero ? previewLayer.position.x - 20 : leftMenuInset + previewLayer.frame.width / 2.0, y: previewLayer.position.y),
duration: 0.15
)
}
animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode)
animatePosition(for: prevPreviewInputPanelNode.waveformScubberNode)
animatePosition(for: prevPreviewInputPanelNode.durationLabel)
animatePosition(for: prevPreviewInputPanelNode.playButton)
animatePosition(for: prevPreviewInputPanelNode.waveformBackgroundNode.layer)
animatePosition(for: prevPreviewInputPanelNode.waveformScrubberNode.layer)
animatePosition(for: prevPreviewInputPanelNode.durationLabel.layer)
animatePosition(for: prevPreviewInputPanelNode.playButton.layer)
if let view = prevPreviewInputPanelNode.scrubber.view {
animatePosition(for: view.layer)
}
}
func animateAlpha(for previewSubnode: ASDisplayNode) {
previewSubnode.layer.animateAlpha(
func animateAlpha(for previewLayer: CALayer) {
previewLayer.animateAlpha(
from: 1.0,
to: 0.0,
duration: 0.15,
removeOnCompletion: false
)
}
animateAlpha(for: prevPreviewInputPanelNode.waveformBackgroundNode)
animateAlpha(for: prevPreviewInputPanelNode.waveformScubberNode)
animateAlpha(for: prevPreviewInputPanelNode.durationLabel)
animateAlpha(for: prevPreviewInputPanelNode.playButton)
animateAlpha(for: prevPreviewInputPanelNode.waveformBackgroundNode.layer)
animateAlpha(for: prevPreviewInputPanelNode.waveformScrubberNode.layer)
animateAlpha(for: prevPreviewInputPanelNode.durationLabel.layer)
animateAlpha(for: prevPreviewInputPanelNode.playButton.layer)
if let view = prevPreviewInputPanelNode.scrubber.view {
animateAlpha(for: view.layer)
}
let binNode = prevPreviewInputPanelNode.binNode
self.animatingBinNode = binNode
@@ -2632,6 +2651,7 @@ class ChatTextInputPanelNode: ChatInputPanelNode, ASEditableTextNodeDelegate, Ch
}
if self.isMediaDeleted && !isRecording {
self.attachmentButton.layer.animateAlpha(from: 0.0, to: 0, duration: 0.01, delay: 0.0, removeOnCompletion: false)
binNode.completion = dismissBin
binNode.play()
} else {

View File

@@ -166,6 +166,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
public let enablePreloads = Promise<Bool>()
public let hasPreloadBlockingContent = Promise<Bool>(false)
public let deviceContactPhoneNumbers = Promise<Set<String>>(Set())
private var accountUserInterfaceInUseContexts: [AccountRecordId: AccountUserInterfaceInUseContext] = [:]
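SharedAccountContextImpl only declares the deviceContactPhoneNumbers promise here, seeded with an empty set; the code that actually fills it is not part of this excerpt. A rough sketch of one way it could be populated, assuming direct use of Apple's Contacts framework — the commit more likely sources it from the existing device contact pipeline, and authorization and error handling are omitted:

    import Foundation
    import Contacts
    import SwiftSignalKit

    // Sketch only: collects raw phone number strings of all device contacts and
    // publishes them through the promise. Hypothetical helper, not from the commit.
    func updateDeviceContactPhoneNumbers(promise: Promise<Set<String>>) {
        DispatchQueue.global(qos: .utility).async {
            var numbers = Set<String>()
            let request = CNContactFetchRequest(keysToFetch: [CNContactPhoneNumbersKey as CNKeyDescriptor])
            try? CNContactStore().enumerateContacts(with: request) { contact, _ in
                for labeled in contact.phoneNumbers {
                    numbers.insert(labeled.value.stringValue)
                }
            }
            promise.set(.single(numbers))
        }
    }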