Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-12-23 22:55:00 +00:00
Improve QR auth and sessions UI
@@ -146,14 +146,117 @@ public final class MediaPlayerNode: ASDisplayNode {
    private func poll(completion: @escaping (PollStatus) -> Void) {
        if let (takeFrameQueue, takeFrame) = self.takeFrameAndQueue, let videoLayer = self.videoLayer, let (timebase, _, _, _) = self.state {
            let layerRef = Unmanaged.passRetained(videoLayer)
            let layerTime = CMTimeGetSeconds(CMTimebaseGetTime(timebase))
            let rate = CMTimebaseGetRate(timebase)
            
            struct PollState {
                var numFrames: Int
                var maxTakenTime: Double
            }
            
            var loop: ((PollState) -> Void)?
            let loopImpl: (PollState) -> Void = { [weak self] state in
                assert(Queue.mainQueue().isCurrent())
                
                guard let strongSelf = self, let videoLayer = strongSelf.videoLayer else {
                    return
                }
                if !videoLayer.isReadyForMoreMediaData {
                    completion(.delay(max(1.0 / 30.0, state.maxTakenTime - layerTime)))
                    return
                }
                
                var state = state
                
                takeFrameQueue.async {
                    switch takeFrame() {
                    case let .restoreState(frames, atTime):
                        Queue.mainQueue().async {
                            guard let strongSelf = self, let videoLayer = strongSelf.videoLayer else {
                                return
                            }
                            videoLayer.flush()
                        }
                        for i in 0 ..< frames.count {
                            let frame = frames[i]
                            let frameTime = CMTimeGetSeconds(frame.position)
                            state.maxTakenTime = frameTime
                            let attachments = CMSampleBufferGetSampleAttachmentsArray(frame.sampleBuffer, createIfNecessary: true)! as NSArray
                            let dict = attachments[0] as! NSMutableDictionary
                            if i == 0 {
                                CMSetAttachment(frame.sampleBuffer, key: kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding as NSString, value: kCFBooleanTrue as AnyObject, attachmentMode: kCMAttachmentMode_ShouldPropagate)
                                CMSetAttachment(frame.sampleBuffer, key: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString, value: kCFBooleanTrue as AnyObject, attachmentMode: kCMAttachmentMode_ShouldPropagate)
                            }
                            if CMTimeCompare(frame.position, atTime) < 0 {
                                dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DoNotDisplay as NSString as String)
                            } else if CMTimeCompare(frame.position, atTime) == 0 {
                                dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DisplayImmediately as NSString as String)
                                dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleBufferAttachmentKey_EndsPreviousSampleDuration as NSString as String)
                            }
                            Queue.mainQueue().async {
                                guard let strongSelf = self, let videoLayer = strongSelf.videoLayer else {
                                    return
                                }
                                videoLayer.enqueue(frame.sampleBuffer)
                            }
                        }
                        Queue.mainQueue().async {
                            loop?(state)
                        }
                    case let .frame(frame):
                        state.numFrames += 1
                        let frameTime = CMTimeGetSeconds(frame.position)
                        if frame.resetDecoder {
                            Queue.mainQueue().async {
                                guard let strongSelf = self, let videoLayer = strongSelf.videoLayer else {
                                    return
                                }
                                videoLayer.flush()
                            }
                        }
                        
                        if frame.decoded && frameTime < layerTime {
                            Queue.mainQueue().async {
                                loop?(state)
                            }
                        } else {
                            state.maxTakenTime = frameTime
                            Queue.mainQueue().async {
                                guard let strongSelf = self, let videoLayer = strongSelf.videoLayer else {
                                    return
                                }
                                videoLayer.enqueue(frame.sampleBuffer)
                            }
                            
                            Queue.mainQueue().async {
                                loop?(state)
                            }
                        }
                    case .skipFrame:
                        Queue.mainQueue().async {
                            loop?(state)
                        }
                    case .noFrames:
                        DispatchQueue.main.async {
                            completion(.finished)
                        }
                    case .finished:
                        DispatchQueue.main.async {
                            completion(.finished)
                        }
                    }
                }
            }
            loop = loopImpl
            loop?(PollState(numFrames: 0, maxTakenTime: layerTime + 0.1))
            
            /*let layerRef = Unmanaged.passRetained(videoLayer)
            takeFrameQueue.async {
                let status: PollStatus
                do {
                    var numFrames = 0
                    let layer = layerRef.takeUnretainedValue()
                    let layerTime = CMTimeGetSeconds(CMTimebaseGetTime(timebase))
                    let rate = CMTimebaseGetRate(timebase)
                    
                    var maxTakenTime = layerTime + 0.1
                    var finised = false
                    loop: while true {
@@ -230,7 +333,7 @@ public final class MediaPlayerNode: ASDisplayNode {

                        completion(status)
                    }
                }
            }*/
        }
    }
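Note on the pattern used in this hunk: the new polling path drives itself with a closure that re-schedules itself. `loopImpl` is stored into the optional variable `loop` so that the closure body can call `loop?(state)` again after work on `takeFrameQueue` has bounced back to the main queue. A minimal, self-contained sketch of that technique follows; it uses plain DispatchQueue and made-up names (SketchPollState, runPollLoop, the five-iteration cap), not anything from this file.

import Foundation

// Sketch of the "optional closure that re-invokes itself" pattern from the
// hunk above; illustrative only, not the project's frame handling.
struct SketchPollState {
    var numFrames: Int
    var maxTakenTime: Double
}

func runPollLoop(workQueue: DispatchQueue, completion: @escaping (SketchPollState) -> Void) {
    // Declared as an optional var so the closure below can capture it before
    // it has been assigned; the assignment happens right after the definition.
    var loop: ((SketchPollState) -> Void)? = nil
    let loopImpl: (SketchPollState) -> Void = { state in
        // The real loop stops when takeFrame() reports .finished or the video
        // layer stops accepting data; this sketch just caps the iterations.
        if state.numFrames >= 5 {
            loop = nil // release the self-reference so the closures can be freed
            completion(state)
            return
        }

        // Advance the state, hop to the work queue, then bounce back to the
        // main queue and re-enter the loop, mirroring the takeFrameQueue /
        // Queue.mainQueue() ping-pong above.
        let next = SketchPollState(numFrames: state.numFrames + 1, maxTakenTime: state.maxTakenTime + 1.0 / 30.0)
        workQueue.async {
            DispatchQueue.main.async {
                loop?(next)
            }
        }
    }
    loop = loopImpl
    loop?(SketchPollState(numFrames: 0, maxTakenTime: 0.0))
}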
@@ -286,10 +389,6 @@ public final class MediaPlayerNode: ASDisplayNode {

                    strongSelf.layer.addSublayer(videoLayer)
                    
                    /*let testLayer = RuntimeUtils.makeLayerHostCopy(videoLayer.sublayers![0].sublayers![0])*/
                    //testLayer.frame = CGRect(origin: CGPoint(x: -500.0, y: -300.0), size: CGSize(width: 60.0, height: 60.0))
                    //strongSelf.layer.addSublayer(testLayer)
                    
                    strongSelf.updateState()
                }
            }
@@ -302,13 +401,11 @@ public final class MediaPlayerNode: ASDisplayNode {

        if let (takeFrameQueue, _) = self.takeFrameAndQueue {
            if let videoLayer = self.videoLayer {
                takeFrameQueue.async {
                    videoLayer.flushAndRemoveImage()
                    
                    Queue.mainQueue().after(1.0, {
                        videoLayer.flushAndRemoveImage()
                        
                        takeFrameQueue.after(1.0, {
                            videoLayer.flushAndRemoveImage()
                        })
                    }
                    })
                }
            }
        }
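The teardown in this last hunk flushes the sample-buffer layer from the decode queue and then flushes again after a delay, so a frame enqueued by a decode that was already in flight does not stay on screen. A rough standalone equivalent, using plain AVSampleBufferDisplayLayer and DispatchQueue rather than the project's Queue wrapper (names here are illustrative):

import AVFoundation

// Sketch of the delayed double-flush teardown shown above; illustrative only.
func tearDownVideoLayer(_ videoLayer: AVSampleBufferDisplayLayer, decodeQueue: DispatchQueue) {
    decodeQueue.async {
        // Drop any queued frames and clear the currently displayed image.
        videoLayer.flushAndRemoveImage()

        // Flush once more after a delay, in case an in-flight decode enqueued
        // another frame after the first flush.
        DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) {
            videoLayer.flushAndRemoveImage()
        }
    }
}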