Update PlayerV2

This commit is contained in:
Isaac 2025-01-14 22:49:26 +08:00
parent 8aeb7f8143
commit 4d66f53230
17 changed files with 172 additions and 33 deletions

View File

@@ -13682,3 +13682,6 @@ Sorry for the inconvenience.";
"Gift.View.Context.Share" = "Share";
"Gift.View.Context.CopyLink" = "Copy Link";
"Gift.View.Context.Transfer" = "Transfer";
"ChatListFilter.NameEnableAnimations" = "Enable Animations";
"ChatListFilter.NameDisableAnimations" = "Disable Animations";

View File

@@ -378,10 +378,9 @@ private enum ChatListFilterPresetEntry: ItemListNodeEntry {
case .screenHeader:
return ChatListFilterSettingsHeaderItem(context: arguments.context, theme: presentationData.theme, text: "", animation: .newFolder, sectionId: self.section)
case let .nameHeader(title, enableAnimations):
//TODO:localize
var actionText: String?
if let enableAnimations {
actionText = enableAnimations ? "Disable Animations" : "Enable Animations"
actionText = enableAnimations ? presentationData.strings.ChatListFilter_NameDisableAnimations : presentationData.strings.ChatListFilter_NameEnableAnimations
}
return ItemListSectionHeaderItem(presentationData: presentationData, text: title, actionText: actionText, action: {
arguments.toggleNameAnimations()

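The two hunks above are two halves of one change: the new ChatListFilter.NameEnableAnimations / ChatListFilter.NameDisableAnimations keys land in the strings file, and the folder-settings header stops hard-coding the English text. A minimal sketch of the lookup, assuming the usual convention that dotted keys surface as underscore-named accessors on PresentationStrings; the helper function itself is hypothetical:

// Hypothetical helper mirroring the hunk: resolve the header action text from
// localized strings instead of string literals.
func nameHeaderActionText(enableAnimations: Bool?, strings: PresentationStrings) -> String? {
    guard let enableAnimations else {
        return nil
    }
    return enableAnimations
        ? strings.ChatListFilter_NameDisableAnimations
        : strings.ChatListFilter_NameEnableAnimations
}
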
View File

@@ -36,6 +36,40 @@
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
} else if (preferHardwareAccelerationCapable && codecId == AV_CODEC_ID_H264) {
void *codecIterationState = nil;
while (true) {
AVCodec const *codec = av_codec_iterate(&codecIterationState);
if (!codec) {
break;
}
if (!av_codec_is_decoder(codec)) {
continue;
}
if (codec->id != codecId) {
continue;
}
if (strncmp(codec->name, "h264", 2) == 0) {
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
} else if (preferHardwareAccelerationCapable && codecId == AV_CODEC_ID_HEVC) {
void *codecIterationState = nil;
while (true) {
AVCodec const *codec = av_codec_iterate(&codecIterationState);
if (!codec) {
break;
}
if (!av_codec_is_decoder(codec)) {
continue;
}
if (codec->id != codecId) {
continue;
}
if (strncmp(codec->name, "hevc", 2) == 0) {
return [[FFMpegAVCodec alloc] initWithImpl:codec];
}
}
}
AVCodec const *codec = avcodec_find_decoder(codecId);

View File

@@ -423,7 +423,12 @@ public final class LocationMapNode: ASDisplayNode, MKMapViewDelegateTarget {
self.mapView?.setRegion(region, animated: animated)
} else {
let mapRect = MKMapRect(region: region)
self.mapView?.setVisibleMapRect(mapRect, edgePadding: UIEdgeInsets(top: offset.y + self.topPadding, left: offset.x, bottom: 0.0, right: 0.0), animated: animated)
var effectiveTopOffset: CGFloat = offset.y
if #available(iOS 18.0, *) {
} else {
effectiveTopOffset += self.topPadding
}
self.mapView?.setVisibleMapRect(mapRect, edgePadding: UIEdgeInsets(top: effectiveTopOffset, left: offset.x, bottom: 0.0, right: 0.0), animated: animated)
}
self.ignoreRegionChanges = false
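
The hunk stops adding the node's own topPadding to the visible-rect inset on iOS 18, keeping the manual correction only for older systems (the implication being that MKMapView already applies an equivalent inset there). A small sketch of the gate as a helper; topPadding is assumed to be the node's stored inset:

// Compute the edge padding for setVisibleMapRect; the manual top padding is
// only applied before iOS 18 (assumption: newer MKMapView handles it itself).
func visibleRectPadding(offset: CGPoint, topPadding: CGFloat) -> UIEdgeInsets {
    var top = offset.y
    if #available(iOS 18.0, *) {
        // no extra padding
    } else {
        top += topPadding
    }
    return UIEdgeInsets(top: top, left: offset.x, bottom: 0.0, right: 0.0)
}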

View File

@@ -45,7 +45,7 @@ private func FFMpegLookaheadReader_readPacketCallback(userData: UnsafeMutableRaw
memcpy(buffer, bytes, fetchedData.count)
}
let fetchedCount = Int32(fetchedData.count)
print("Fetched from \(context.readingOffset) (\(fetchedCount) bytes)")
//print("Fetched from \(context.readingOffset) (\(fetchedCount) bytes)")
context.setReadingOffset(offset: context.readingOffset + Int64(fetchedCount))
if fetchedCount == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF

View File

@@ -201,6 +201,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
private var baseRate: Double = 1.0
private var isSoundEnabled: Bool
private var isMuted: Bool
private var isAmbientMode: Bool
private var seekId: Int = 0
private var seekTimestamp: Double = 0.0
@@ -251,6 +252,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.isSoundEnabled = enableSound
self.isMuted = soundMuted
self.isAmbientMode = ambient
self.baseRate = baseRate
self.renderSynchronizer = AVSampleBufferRenderSynchronizer()
@@ -317,7 +319,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
if self.isSoundEnabled && self.hasSound {
if self.audioSessionDisposable == nil {
self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
audioSessionType: .play(mixWithOthers: false),
audioSessionType: self.isAmbientMode ? .ambient : .play(mixWithOthers: false),
activateImmediately: false,
manualActivate: { [weak self] control in
control.setupAndActivate(synchronous: false, { state in
@@ -775,6 +777,22 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
}
public func continueWithOverridingAmbientMode(isAmbient: Bool) {
if self.isAmbientMode != isAmbient {
self.isAmbientMode = isAmbient
self.hasAudioSession = false
self.updateInternalState()
self.audioSessionDisposable?.dispose()
self.audioSessionDisposable = nil
let currentTimestamp: CMTime
if let pendingSeekTimestamp = self.pendingSeekTimestamp {
currentTimestamp = CMTimeMakeWithSeconds(pendingSeekTimestamp, preferredTimescale: 44000)
} else {
currentTimestamp = self.renderSynchronizer.currentTime()
}
self.seek(timestamp: currentTimestamp.seconds, play: nil)
}
}
public func continuePlayingWithoutSound(seek: MediaPlayerSeek) {
@@ -877,6 +895,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
self.loadedPartsMediaData.with { [weak self] loadedPartsMediaData in
loadedPartsMediaData.parts.removeAll()
loadedPartsMediaData.seekFromMinTimestamp = timestamp
loadedPartsMediaData.directMediaData = nil
loadedPartsMediaData.directReaderId = nil
Queue.mainQueue().async {
guard let self else {
@@ -1050,6 +1070,9 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
continue outer
}
}
/*if isVideo {
print("Enqueue video \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value)")
}*/
/*if !isVideo {
print("Enqueue audio \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value) next: \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + 1024)")
}*/
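
ChunkMediaPlayerV2 now stores the ambient flag, uses it when pushing an audio session, and implements continueWithOverridingAmbientMode by dropping the session, requesting a new one, and re-seeking to the current position so the category change takes effect mid-playback. A minimal sketch of the category selection; the enum name ManagedAudioSessionType is assumed from the ManagedAudioSession API the hunk calls into:

// Pick the session type from the stored flag; after the flag flips, the old
// session disposable is disposed and a new session is pushed so this value is
// re-evaluated on the next activation.
var audioSessionType: ManagedAudioSessionType {
    return self.isAmbientMode ? .ambient : .play(mixWithOthers: false)
}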

View File

@@ -163,6 +163,10 @@ public final class FFMpegMediaDataReaderV2: MediaDataReader {
var passthroughDecoder = true
var useHardwareAcceleration = false
if (codecName == "h264" || codecName == "hevc") {
passthroughDecoder = false
useHardwareAcceleration = true
}
if (codecName == "av1" || codecName == "av01") {
passthroughDecoder = false
useHardwareAcceleration = internal_isHardwareAv1Supported
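
Together with the FFMpegAVCodec change earlier in the commit, this sends h264 and hevc streams through the full FFmpeg decode path with hardware acceleration requested instead of the passthrough decoder, while AV1 keeps its runtime capability check. A condensed sketch of the selection as it reads after the hunk; internal_isHardwareAv1Supported comes from the surrounding file:

// Decide the decode strategy from the stream's codec name.
var passthroughDecoder = true
var useHardwareAcceleration = false
switch codecName {
case "h264", "hevc":
    passthroughDecoder = false
    useHardwareAcceleration = true
case "av1", "av01":
    passthroughDecoder = false
    useHardwareAcceleration = internal_isHardwareAv1Supported
default:
    break
}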

View File

@@ -303,6 +303,18 @@ public final class ChatInlineSearchResultsListComponent: Component {
}
}
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
guard let result = super.hitTest(point, with: event) else {
return nil
}
if result === self.listNode.view {
if self.backgroundColor == nil {
return nil
}
}
return result
}
func update(component: ChatInlineSearchResultsListComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: ComponentTransition) -> CGSize {
self.isUpdating = true
defer {

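The new override makes the list transparent to touches while the component has no background color (its see-through state), so taps reach the content behind it; once a background is set, hit testing behaves normally. The same pass-through idea is used again in ChatControllerNodeView later in the commit. A minimal sketch of the pattern:

// Let touches fall through a designated subview while the view is in its
// transparent state; otherwise return the normal hit-test result.
override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
    guard let result = super.hitTest(point, with: event) else {
        return nil
    }
    if result === self.listNode.view && self.backgroundColor == nil {
        return nil
    }
    return result
}
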
View File

@@ -342,7 +342,7 @@ public final class MediaRecordingPanelComponent: Component {
}
self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)
self.vibrancyCancelIconView.tintColor = .white
self.vibrancyCancelIconView.tintColor = .black
let cancelTextSize = self.cancelText.update(
transition: .immediate,
@@ -352,7 +352,7 @@
)
let _ = self.vibrancyCancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: component.strings.Conversation_SlideToCancel, font: Font.regular(15.0), color: .white)),
component: AnyComponent(Text(text: component.strings.Conversation_SlideToCancel, font: Font.regular(15.0), color: .black)),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)

View File

@@ -457,7 +457,7 @@ public final class MessageInputPanelComponent: Component {
public final class View: UIView {
private let fieldBackgroundView: BlurredBackgroundView
private let vibrancyEffectView: UIVisualEffectView
private let fieldBackgroundTint: UIView
private let gradientView: UIImageView
private let bottomGradientView: UIView
@@ -522,12 +522,16 @@
}
override init(frame: CGRect) {
self.fieldBackgroundView = BlurredBackgroundView(color: UIColor(white: 0.0, alpha: 0.5), enableBlur: true)
self.vibrancyEffectView = UIVisualEffectView(effect: UIVibrancyEffect(blurEffect: UIBlurEffect(style: .dark)))
self.fieldBackgroundView = BlurredBackgroundView(color: nil, enableBlur: true)
self.fieldBackgroundTint = UIView()
self.fieldBackgroundTint.backgroundColor = UIColor(white: 1.0, alpha: 0.1)
self.mediaRecordingVibrancyContainer = UIView()
self.vibrancyEffectView.contentView.addSubview(self.mediaRecordingVibrancyContainer)
if let filter = CALayer.luminanceToAlpha() {
self.mediaRecordingVibrancyContainer.backgroundColor = .white
self.mediaRecordingVibrancyContainer.layer.filters = [filter]
}
self.fieldBackgroundTint.mask = self.mediaRecordingVibrancyContainer
self.gradientView = UIImageView()
self.bottomGradientView = UIView()
@@ -538,8 +542,8 @@
self.addSubview(self.bottomGradientView)
self.addSubview(self.gradientView)
self.fieldBackgroundView.addSubview(self.vibrancyEffectView)
self.addSubview(self.fieldBackgroundView)
self.addSubview(self.fieldBackgroundTint)
self.addSubview(self.textClippingView)
self.viewForOverlayContent = ViewForOverlayContent(
@@ -876,7 +880,7 @@
transition: placeholderTransition,
component: AnyComponent(AnimatedTextComponent(
font: Font.regular(17.0),
color: .white,
color: .black,
items: placeholderItems
)),
environment: {},
@@ -912,7 +916,7 @@
if let headerView = headerView as? ForwardInfoPanelComponent.View {
if headerView.superview == nil {
self.addSubview(headerView)
self.vibrancyEffectView.contentView.addSubview(headerView.backgroundView)
self.mediaRecordingVibrancyContainer.addSubview(headerView.backgroundView)
headerView.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4)
}
@@ -965,11 +969,15 @@
let rawFieldBackgroundFrame = fieldBackgroundFrame
fieldBackgroundFrame.size.height += headerHeight
transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
self.vibrancyEffectView.isHidden = false // component.style == .media
//transition.setFrame(view: self.vibrancyEffectView, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
transition.setFrame(view: self.fieldBackgroundTint, frame: fieldBackgroundFrame)
transition.setFrame(view: self.mediaRecordingVibrancyContainer, frame: CGRect(origin: CGPoint(), size: fieldBackgroundFrame.size))
//self.fieldBackgroundTint.backgroundColor = .blue
transition.setCornerRadius(layer: self.fieldBackgroundTint.layer, cornerRadius: headerHeight > 0.0 ? 18.0 : baseFieldHeight * 0.5)
var textClippingFrame = rawFieldBackgroundFrame.offsetBy(dx: 0.0, dy: headerHeight)
if component.style == .media, !isEditing {
@@ -993,7 +1001,7 @@
if let placeholderView = self.placeholder.view, let vibrancyPlaceholderView = self.vibrancyPlaceholder.view {
if vibrancyPlaceholderView.superview == nil {
vibrancyPlaceholderView.layer.anchorPoint = CGPoint()
self.vibrancyEffectView.contentView.addSubview(vibrancyPlaceholderView)
self.mediaRecordingVibrancyContainer.addSubview(vibrancyPlaceholderView)
vibrancyPlaceholderView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.4)
}
@@ -1768,7 +1776,7 @@
lightFieldColor = UIColor(white: 0.2, alpha: 0.45)
} else if self.textFieldExternalState.hasText && component.alwaysDarkWhenHasText {
fieldBackgroundIsDark = true
} else if isEditing || component.style == .editor {
} else if isEditing || component.style == .story || component.style == .editor {
fieldBackgroundIsDark = true
}
self.fieldBackgroundView.updateColor(color: fieldBackgroundIsDark ? UIColor(white: 0.0, alpha: 0.5) : lightFieldColor, transition: transition.containedViewLayoutTransition)
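
This is the core of the input-panel change: the UIVibrancyEffect view is replaced by a plain tint view (fieldBackgroundTint, white at 10% alpha) whose mask is a container running a luminanceToAlpha layer filter. The container's white background keeps the tint visible everywhere, while content drawn into it in black drops to zero alpha in the mask and lets the blurred field show through, which appears to be why the placeholder, cancel icon and forward-panel background in the other hunks switched from white to black. A minimal sketch of the setup, treating CALayer.luminanceToAlpha() as the project helper the hunk already uses:

// Tint layer that replaces the vibrancy effect view.
let fieldBackgroundTint = UIView()
fieldBackgroundTint.backgroundColor = UIColor(white: 1.0, alpha: 0.1)

// Mask container: luminance becomes alpha, so white areas keep the tint and
// black content cuts holes into it.
let vibrancyContainer = UIView()
vibrancyContainer.backgroundColor = .white
if let filter = CALayer.luminanceToAlpha() {
    vibrancyContainer.layer.filters = [filter]
}
fieldBackgroundTint.mask = vibrancyContainer

// Black placeholder text added to vibrancyContainer shows the blurred field
// through its glyphs, approximating the old vibrancy look.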

View File

@@ -51,9 +51,14 @@ final class PeerInfoHeaderNavigationButtonContainerNode: SparseNode {
button.updateContentsColor(backgroundColor: self.backgroundContentColor, contentsColor: self.contentsColor, canBeExpanded: canBeExpanded, transition: transition)
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: canBeExpanded ? -8.0 : 0.0, y: 0.0))
}
var accumulatedRightButtonOffset: CGFloat = canBeExpanded ? 16.0 : 0.0
for (_, button) in self.rightButtonNodes {
button.updateContentsColor(backgroundColor: self.backgroundContentColor, contentsColor: self.contentsColor, canBeExpanded: canBeExpanded, transition: transition)
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: canBeExpanded ? 16.0 : 0.0, y: 0.0))
transition.updateSublayerTransformOffset(layer: button.layer, offset: CGPoint(x: accumulatedRightButtonOffset, y: 0.0))
if self.backgroundContentColor.alpha != 0.0 {
accumulatedRightButtonOffset -= 6.0
}
}
}
@@ -171,8 +176,8 @@ final class PeerInfoHeaderNavigationButtonContainerNode: SparseNode {
if self.currentRightButtons != rightButtons || presentationData.strings !== self.presentationData?.strings {
self.currentRightButtons = rightButtons
var nextRegularButtonOrigin = size.width - sideInset
var nextExpandedButtonOrigin = size.width - sideInset
var nextRegularButtonOrigin = size.width - sideInset - 8.0
var nextExpandedButtonOrigin = size.width - sideInset - 8.0
for spec in rightButtons.reversed() {
let buttonNode: PeerInfoHeaderNavigationButton
var wasAdded = false
@@ -248,8 +253,8 @@
}
}
} else {
var nextRegularButtonOrigin = size.width - sideInset
var nextExpandedButtonOrigin = size.width - sideInset
var nextRegularButtonOrigin = size.width - sideInset - 8.0
var nextExpandedButtonOrigin = size.width - sideInset - 8.0
for spec in rightButtons.reversed() {
var key = spec.key

View File

@@ -78,7 +78,7 @@ public final class ForwardInfoPanelComponent: Component {
self.blurBackgroundView.clipsToBounds = true
self.backgroundView = UIImageView()
self.backgroundView.image = generateStretchableFilledCircleImage(radius: 4.0, color: UIColor(white: 1.0, alpha: 0.4))
self.backgroundView.image = generateStretchableFilledCircleImage(radius: 4.0, color: UIColor(white: 0.0, alpha: 0.4))
self.blockView = MessageInlineBlockBackgroundView()

View File

@@ -1122,6 +1122,14 @@ private final class StoryContainerScreenComponent: Component {
self.didAnimateOut = true
}
func inFocusUpdated(isInFocus: Bool) {
for (_, itemSetView) in self.visibleItemSetViews {
if let itemSetComponentView = itemSetView.view.view as? StoryItemSetContainerComponent.View {
itemSetComponentView.inFocusUpdated(isInFocus: isInFocus)
}
}
}
private func updateVolumeButtonMonitoring() {
guard self.volumeButtonsListener == nil, let component = self.component else {
return
@@ -2126,6 +2134,14 @@ public class StoryContainerScreen: ViewControllerComponentContainer {
}
}
}
override public func inFocusUpdated(isInFocus: Bool) {
super.inFocusUpdated(isInFocus: isInFocus)
if let componentView = self.node.hostView.componentView as? StoryContainerScreenComponent.View {
componentView.inFocusUpdated(isInFocus: isInFocus)
}
}
}
func allowedStoryReactions(context: AccountContext) -> Signal<[ReactionItem], NoError> {

View File

@@ -1908,6 +1908,10 @@ public final class StoryItemSetContainerComponent: Component {
}
}
func inFocusUpdated(isInFocus: Bool) {
self.updateIsProgressPaused()
}
func activateInput() -> Bool {
guard let component = self.component else {
return false
@@ -5205,6 +5209,7 @@ public final class StoryItemSetContainerComponent: Component {
}
navigationController.setViewControllers(viewControllers, animated: true)
}
self.updateIsProgressPaused()
}
func navigateToPeer(peer: EnginePeer, chat: Bool, subject: ChatControllerSubject? = nil) {

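The two story hunks wire up a focus-change chain: the StoryContainerScreen controller overrides inFocusUpdated, forwards it to its component view, which fans it out to every visible item-set view, which then recomputes whether story progress should be paused. A compressed sketch of the chain, with bodies reduced to the calls the hunks add:

// StoryContainerScreen (view controller)
override public func inFocusUpdated(isInFocus: Bool) {
    super.inFocusUpdated(isInFocus: isInFocus)
    (self.node.hostView.componentView as? StoryContainerScreenComponent.View)?.inFocusUpdated(isInFocus: isInFocus)
}

// StoryContainerScreenComponent.View
func inFocusUpdated(isInFocus: Bool) {
    for (_, itemSetView) in self.visibleItemSetViews {
        (itemSetView.view.view as? StoryItemSetContainerComponent.View)?.inFocusUpdated(isInFocus: isInFocus)
    }
}

// StoryItemSetContainerComponent.View
func inFocusUpdated(isInFocus: Bool) {
    self.updateIsProgressPaused()
}
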
View File

@@ -410,7 +410,7 @@ public final class TextFieldComponent: Component {
}
self.updateInputState { state in
if let characterLimit = component.characterLimit, state.inputText.length + text.length > characterLimit {
if let characterLimit = component.characterLimit, state.inputText.string.count + text.string.count > characterLimit {
return state
}
return state.insertText(text)
@@ -732,14 +732,21 @@
}
if let characterLimit = component.characterLimit {
let replacementString = text as NSString
let string = self.inputState.inputText.string as NSString
let deltaLength = replacementString.length - range.length
let resultingLength = string.length + deltaLength
let changingRangeString = string.substring(with: range)
let deltaLength = text.count - changingRangeString.count
let resultingLength = (string as String).count + deltaLength
if resultingLength > characterLimit {
let availableLength = characterLimit - string.length
let availableLength = characterLimit - (string as String).count
if availableLength > 0 {
var insertString = replacementString.substring(to: availableLength)
var insertString = ""
for i in 0 ..< availableLength {
if text.count <= i {
break
}
insertString.append(text[text.index(text.startIndex, offsetBy: i)])
}
switch component.emptyLineHandling {
case .allowed:

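Both character-limit checks in TextFieldComponent move from NSString/UTF-16 lengths to Swift Character counts, so an emoji or other multi-scalar grapheme counts once against the limit rather than as several code units. A small sketch of the clamping; String.prefix(_:) does the same job as the manual index loop in the hunk:

// Clamp an insertion so the total Character count stays within the limit.
func clampedInsertion(_ text: String, existingCount: Int, characterLimit: Int) -> String {
    let available = characterLimit - existingCount
    guard available > 0 else {
        return ""
    }
    return String(text.prefix(available))
}

// Illustrative values: a multi-scalar family emoji counts as one Character,
// while its NSString.length would be 8 UTF-16 code units.
// clampedInsertion("ab👨‍👩‍👧", existingCount: 7, characterLimit: 10) -> "ab👨‍👩‍👧"
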
View File

@@ -2395,6 +2395,11 @@ extension ChatControllerImpl {
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: interactive, { current in
return current.updatedSearch(current.search == nil ? ChatSearchData(domain: domain).withUpdatedQuery(query) : current.search?.withUpdatedDomain(domain).withUpdatedQuery(query))
}, completion: { [weak strongSelf] _ in
guard let strongSelf else {
return
}
strongSelf.chatDisplayNode.searchNavigationNode?.activate()
})
strongSelf.updateItemNodesSearchTextHighlightStates()
})

View File

@@ -54,6 +54,7 @@ final class VideoNavigationControllerDropContentItem: NavigationControllerDropCo
}
private final class ChatControllerNodeView: UITracingLayerView, WindowInputAccessoryHeightProvider {
weak var node: ChatControllerNode?
var inputAccessoryHeight: (() -> CGFloat)?
var hitTestImpl: ((CGPoint, UIEvent?) -> UIView?)?
@@ -65,7 +66,17 @@ private final class ChatControllerNodeView: UITracingLayerView, WindowInputAcces
if let result = self.hitTestImpl?(point, event) {
return result
}
return super.hitTest(point, with: event)
guard let result = super.hitTest(point, with: event) else {
return nil
}
if let node = self.node {
if result === node.historyNodeContainer.view {
if node.historyNode.alpha == 0.0 {
return nil
}
}
}
return result
}
}
@@ -172,7 +183,7 @@ class ChatControllerNode: ASDisplayNode, ASScrollViewDelegate {
private(set) var validLayout: (ContainerViewLayout, CGFloat)?
private var visibleAreaInset = UIEdgeInsets()
private var searchNavigationNode: ChatSearchNavigationContentNode?
private(set) var searchNavigationNode: ChatSearchNavigationContentNode?
private var navigationModalFrame: NavigationModalFrame?
@@ -727,6 +738,8 @@ class ChatControllerNode: ASDisplayNode, ASScrollViewDelegate {
return ChatControllerNodeView()
})
(self.view as? ChatControllerNodeView)?.node = self
(self.view as? ChatControllerNodeView)?.inputAccessoryHeight = { [weak self] in
if let strongSelf = self {
return strongSelf.getWindowInputAccessoryHeight()