Merge commit '43741a3b205f9dc1bbff056045f4236520ca90e9'

Isaac committed c49b242cd5 on 2024-11-29 17:05:18 +04:00
16 changed files with 294 additions and 139 deletions

View File

@ -13297,3 +13297,5 @@ Sorry for the inconvenience.";
"WebApp.ShareMessage.PreviewTitle" = "MESSAGE PREVIEW";
"WebApp.ShareMessage.Info" = "%@ mini app suggests you to send this message to a chat you select.";
"WebApp.ShareMessage.Share" = "Share With...";
"Notification.Gift" = "Gift";

View File

@ -379,18 +379,19 @@ private final class CameraContext {
return
}
var front = false
if #available(iOS 13.0, *) {
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
if sampleBuffer.type == kCMMediaType_Video {
Queue.mainQueue().async {
self.videoOutput?.push(sampleBuffer)
self.videoOutput?.push(sampleBuffer, mirror: front)
}
}
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
var front = false
if #available(iOS 13.0, *) {
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
self.lastSnapshotTimestamp = timestamp
self.savedSnapshot = true
@ -1140,13 +1141,13 @@ public enum CameraRecordingError {
}
public class CameraVideoOutput {
private let sink: (CMSampleBuffer) -> Void
private let sink: (CMSampleBuffer, Bool) -> Void
public init(sink: @escaping (CMSampleBuffer) -> Void) {
public init(sink: @escaping (CMSampleBuffer, Bool) -> Void) {
self.sink = sink
}
func push(_ buffer: CMSampleBuffer) {
self.sink(buffer)
func push(_ buffer: CMSampleBuffer, mirror: Bool) {
self.sink(buffer, mirror)
}
}
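
The hunks above thread a front-camera flag from the capture connection through CameraVideoOutput: the flag is computed once per frame and handed to the sink as the new mirror argument. A minimal, self-contained sketch of how that flag is derived, using standard AVFoundation API rather than the project's exact helper:

import AVFoundation

// True when the connection's source device is the front camera;
// sourceDevicePosition on AVCaptureInput.Port requires iOS 13.
func isFrontCamera(_ connection: AVCaptureConnection) -> Bool {
    if #available(iOS 13.0, *) {
        return connection.inputPorts.first?.sourceDevicePosition == .front
    }
    return false
}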

View File

@ -2081,6 +2081,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
var currentSecretIconImage: UIImage?
var currentForwardedIcon: UIImage?
var currentStoryIcon: UIImage?
var currentGiftIcon: UIImage?
var selectableControlSizeAndApply: (CGFloat, (CGSize, Bool) -> ItemListSelectableControlNode)?
var reorderControlSizeAndApply: (CGFloat, (CGFloat, Bool, ContainedViewLayoutTransition) -> ItemListEditableReorderControlNode)?
@ -2254,6 +2255,7 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
var displayForwardedIcon = false
var displayStoryReplyIcon = false
var displayGiftIcon = false
var ignoreForwardedIcon = false
switch contentData {
@ -2562,6 +2564,22 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
displayForwardedIcon = true
} else if let _ = message.attributes.first(where: { $0 is ReplyStoryAttribute }) {
displayStoryReplyIcon = true
} else {
for media in message.media {
if let action = media as? TelegramMediaAction {
switch action.action {
case .giftPremium, .giftStars, .starGift:
displayGiftIcon = true
case let .giftCode(_, _, _, boostPeerId, _, _, _, _, _, _, _):
if boostPeerId == nil {
displayGiftIcon = true
}
default:
break
}
}
break
}
}
}
@ -2716,6 +2734,10 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
currentStoryIcon = PresentationResourcesChatList.storyReplyIcon(item.presentationData.theme)
}
if displayGiftIcon {
currentGiftIcon = PresentationResourcesChatList.giftIcon(item.presentationData.theme)
}
if let currentForwardedIcon {
textLeftCutout += currentForwardedIcon.size.width
if !contentImageSpecs.isEmpty {
@ -2734,6 +2756,15 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}
if let currentGiftIcon {
textLeftCutout += currentGiftIcon.size.width
if !contentImageSpecs.isEmpty {
textLeftCutout += forwardedIconSpacing
} else {
textLeftCutout += contentImageTrailingSpace
}
}
for i in 0 ..< contentImageSpecs.count {
if i != 0 {
textLeftCutout += contentImageSpacing
@ -4261,6 +4292,9 @@ public class ChatListItemNode: ItemListRevealOptionsItemNode {
messageTypeIconOffset.y += 3.0
} else if let currentStoryIcon {
messageTypeIcon = currentStoryIcon
} else if let currentGiftIcon {
messageTypeIcon = currentGiftIcon
messageTypeIconOffset.y -= 2.0 - UIScreenPixel
}
if let messageTypeIcon {
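
Condensed from the hunk above: the chat list shows the new gift icon for premium, Stars and star-gift service actions, and for gift codes whose boostPeerId is nil. A sketch of that predicate, assuming the action enum is TelegramMediaActionType as the diff's pattern match suggests:

// Hypothetical helper mirroring the switch in the diff; the fourth associated
// value of .giftCode is the boostPeerId that gets checked against nil.
func isGiftServiceAction(_ action: TelegramMediaActionType) -> Bool {
    switch action {
    case .giftPremium, .giftStars, .starGift:
        return true
    case let .giftCode(_, _, _, boostPeerId, _, _, _, _, _, _, _):
        return boostPeerId == nil
    default:
        return false
    }
}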

View File

@ -119,6 +119,7 @@ public enum PresentationResourceKey: Int32 {
case chatListForwardedIcon
case chatListStoryReplyIcon
case chatListGiftIcon
case chatListGeneralTopicIcon
case chatListGeneralTopicSmallIcon

View File

@ -259,6 +259,12 @@ public struct PresentationResourcesChatList {
})
}
public static func giftIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatListGiftIcon.rawValue, { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat List/GiftIcon"), color: theme.chatList.muteIconColor)
})
}
public static func verifiedIcon(_ theme: PresentationTheme) -> UIImage? {
return theme.image(PresentationResourceKey.chatListVerifiedIcon.rawValue, { theme in
if let backgroundImage = UIImage(bundleImageName: "Chat List/PeerVerifiedIconBackground"), let foregroundImage = UIImage(bundleImageName: "Chat List/PeerVerifiedIconForeground") {

View File

@ -736,29 +736,37 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
case let .webViewData(text):
attributedString = NSAttributedString(string: strings.Notification_WebAppSentData(text).string, font: titleFont, textColor: primaryTextColor)
case let .giftPremium(currency, amount, _, _, _, _, _):
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
if !forAdditionalServiceMessage {
attributedString = NSAttributedString(string: strings.Notification_Gift, font: titleFont, textColor: primaryTextColor)
} else {
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_Sent(compactAuthorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
} else {
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_Sent(compactAuthorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
}
}
case let .giftStars(currency, amount, count, _, _, _):
let _ = count
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_StarsGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
if !forAdditionalServiceMessage {
attributedString = NSAttributedString(string: strings.Notification_Gift, font: titleFont, textColor: primaryTextColor)
} else {
var authorName = compactAuthorName
var peerIds: [(Int, EnginePeer.Id?)] = [(0, message.author?.id)]
if message.id.peerId.namespace == Namespaces.Peer.CloudUser && message.id.peerId.id._internalGetInt64Value() == 777000 {
authorName = strings.Notification_StarsGift_UnknownUser
peerIds = []
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_StarsGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
} else {
var authorName = compactAuthorName
var peerIds: [(Int, EnginePeer.Id?)] = [(0, message.author?.id)]
if message.id.peerId.namespace == Namespaces.Peer.CloudUser && message.id.peerId.id._internalGetInt64Value() == 777000 {
authorName = strings.Notification_StarsGift_UnknownUser
peerIds = []
}
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: peerIds)
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_StarsGift_Sent(authorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
}
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: peerIds)
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_StarsGift_Sent(authorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
}
case let .topicCreated(title, iconColor, iconFileId):
if forForumOverview {
@ -957,13 +965,17 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
let mutableAttributedString = NSMutableAttributedString(attributedString: stringWithAppliedEntities(text, entities: entities ?? [], baseColor: primaryTextColor, linkColor: primaryTextColor, baseFont: titleFont, linkFont: titleBoldFont, boldFont: titleBoldFont, italicFont: titleFont, boldItalicFont: titleBoldFont, fixedFont: titleFont, blockQuoteFont: titleFont, underlineLinks: false, message: message._asMessage()))
attributedString = mutableAttributedString
} else if boostPeerId == nil, let currency, let amount {
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
if !forAdditionalServiceMessage {
attributedString = NSAttributedString(string: strings.Notification_Gift, font: titleFont, textColor: primaryTextColor)
} else {
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_Sent(compactAuthorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
let price = formatCurrencyAmount(amount, currency: currency)
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_SentYou(price)._tuple, body: bodyAttributes, argumentAttributes: [0: boldAttributes])
} else {
var attributes = peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)])
attributes[1] = boldAttributes
attributedString = addAttributesToStringWithRanges(strings.Notification_PremiumGift_Sent(compactAuthorName, price)._tuple, body: bodyAttributes, argumentAttributes: attributes)
}
}
} else {
attributedString = NSAttributedString(string: strings.Notification_GiftLink, font: titleFont, textColor: primaryTextColor)
@ -1055,9 +1067,13 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
case .prizeStars:
attributedString = NSAttributedString(string: strings.Notification_StarsPrize, font: titleFont, textColor: primaryTextColor)
case let .starGift(gift, _, text, entities, _, _, _):
if !forAdditionalServiceMessage, let text {
let mutableAttributedString = NSMutableAttributedString(attributedString: stringWithAppliedEntities(text, entities: entities ?? [], baseColor: primaryTextColor, linkColor: primaryTextColor, baseFont: titleFont, linkFont: titleBoldFont, boldFont: titleBoldFont, italicFont: titleFont, boldItalicFont: titleBoldFont, fixedFont: titleFont, blockQuoteFont: titleFont, underlineLinks: false, message: message._asMessage()))
attributedString = mutableAttributedString
if !forAdditionalServiceMessage {
if let text {
let mutableAttributedString = NSMutableAttributedString(attributedString: stringWithAppliedEntities(text, entities: entities ?? [], baseColor: primaryTextColor, linkColor: primaryTextColor, baseFont: titleFont, linkFont: titleBoldFont, boldFont: titleBoldFont, italicFont: titleFont, boldItalicFont: titleBoldFont, fixedFont: titleFont, blockQuoteFont: titleFont, underlineLinks: false, message: message._asMessage()))
attributedString = mutableAttributedString
} else {
attributedString = NSAttributedString(string: strings.Notification_Gift, font: titleFont, textColor: primaryTextColor)
}
} else {
let starsPrice = strings.Notification_StarsGift_Stars(Int32(gift.price))
var authorName = compactAuthorName
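
Each gift case above now follows the same branching pattern: when the string is not built as an additional service message it collapses to the short Notification_Gift label added in the Localizable.strings hunk, and the fully formatted sentence is produced only in the else branch. A condensed sketch of that pattern (the detailed sentence building is elided):

if !forAdditionalServiceMessage {
    attributedString = NSAttributedString(string: strings.Notification_Gift, font: titleFont, textColor: primaryTextColor)
} else {
    // build the full price- and author-specific gift string as before
}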

View File

@ -234,6 +234,13 @@ final class CameraCollage {
self.uniqueIds.append(Int64.random(in: .min ... .max))
}
func getItem(id: Int64) -> CaptureResult? {
guard let index = self.uniqueIds.firstIndex(where: { $0 == id }) else {
return nil
}
return self.results[index]
}
private func checkResults() {
self.results = self.results.filter { $0.content != nil }
}

View File

@ -130,6 +130,7 @@ enum CameraScreenTransition {
case animateIn
case animateOut
case finishedAnimateIn
case flashModeChanged
}
private let cancelButtonTag = GenericComponentViewTag()
@ -1134,6 +1135,9 @@ private final class CameraScreenComponent: CombinedComponent {
let flashContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared {
let animationHint = context.transition.userData(CameraScreenTransition.self)
let shouldAnimateIcon = component.cameraState.flashModeDidChange && animationHint == .flashModeChanged
let flashIconName: String
switch component.cameraState.flashMode {
case .off:
@ -1157,7 +1161,7 @@ private final class CameraScreenComponent: CombinedComponent {
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: flashIconName,
mode: !component.cameraState.flashModeDidChange ? .still(position: .end) : .animating(loop: false),
mode: shouldAnimateIcon ? .animating(loop: false) : .still(position: .end),
range: nil,
waitForCompletion: false
),
@ -1318,7 +1322,7 @@ private final class CameraScreenComponent: CombinedComponent {
state?.updateCollageGrid(grid)
}
),
availableSize: CGSize(width: nextButtonX, height: 40.0),
availableSize: CGSize(width: nextButtonX + 4.0, height: 40.0),
transition: .immediate
)
context.add(collageCarousel
@ -2103,7 +2107,11 @@ public class CameraScreenImpl: ViewController, CameraScreen {
let previousState = self.cameraState
self.cameraState = self.cameraState.updatedPosition(position).updatedFlashMode(flashMode)
if !self.animatingDualCameraPositionSwitch {
self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
var transition: ComponentTransition = .easeInOut(duration: 0.2)
if previousState.flashMode != flashMode {
transition = transition.withUserData(CameraScreenTransition.flashModeChanged)
}
self.requestUpdateLayout(transition: transition)
}
if previousState.position != self.cameraState.position {
@ -2255,15 +2263,15 @@ public class CameraScreenImpl: ViewController, CameraScreen {
case .began:
break
case .changed:
if case .none = self.cameraState.recording, self.cameraState.collageProgress.isZero {
if case .none = self.cameraState.recording {
if case .compact = layout.metrics.widthClass {
switch controller.mode {
case .story:
if (translation.x < -10.0 || self.isDismissing) && self.hasAppeared {
if (translation.x < -10.0 || self.isDismissing) && self.hasAppeared && self.cameraState.collageProgress.isZero {
self.isDismissing = true
let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
controller.updateTransitionProgress(transitionFraction, transition: .immediate)
} else if translation.y < -10.0 && abs(translation.y) > abs(translation.x) {
} else if translation.y < -10.0 && abs(translation.y) > abs(translation.x) && self.cameraState.collageProgress < 1.0 {
controller.presentGallery(fromGesture: true)
gestureRecognizer.isEnabled = false
gestureRecognizer.isEnabled = true
@ -2582,6 +2590,10 @@ public class CameraScreenImpl: ViewController, CameraScreen {
view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
transition.setAlpha(view: view, alpha: 0.0)
}
if let view = self.componentHost.findTaggedView(tag: collageButtonTag) {
view.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2)
transition.setAlpha(view: view, alpha: 0.0)
}
if let view = self.componentHost.findTaggedView(tag: zoomControlTag) {
transition.setAlpha(view: view, alpha: 0.0)
}
@ -2592,7 +2604,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
view.animateOutToEditor(transition: transition)
}
Queue.mainQueue().after(1.0, {
Queue.mainQueue().after(2.0, {
if self.cameraState.isCollageEnabled {
self.collage = nil
if let collageView = self.collageView {
@ -2672,6 +2684,10 @@ public class CameraScreenImpl: ViewController, CameraScreen {
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
transition.setAlpha(view: view, alpha: 1.0)
}
if let view = self.componentHost.findTaggedView(tag: collageButtonTag) {
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
transition.setAlpha(view: view, alpha: 1.0)
}
if let view = self.componentHost.findTaggedView(tag: zoomControlTag) {
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
transition.setAlpha(view: view, alpha: 1.0)
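
The flash icon change spans two of the hunks above: the camera-state update attaches a flashModeChanged hint to the layout transition, and the component plays the Lottie animation only when that hint is present, so unrelated state updates no longer replay it. Condensed from the diff:

// When updating camera state:
var transition: ComponentTransition = .easeInOut(duration: 0.2)
if previousState.flashMode != flashMode {
    transition = transition.withUserData(CameraScreenTransition.flashModeChanged)
}
self.requestUpdateLayout(transition: transition)

// When building the flash button:
let animationHint = context.transition.userData(CameraScreenTransition.self)
let shouldAnimateIcon = component.cameraState.flashModeDidChange && animationHint == .flashModeChanged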

View File

@ -247,96 +247,66 @@ final class CameraVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
computeEncoder.endEncoding()
})
if !self.blurredLayer.isHidden {
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
return
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
return
}
let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
return nil
}
let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return nil
}
blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return nil
}
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
computeEncoder.setTexture(downscaledTexture, index: 0)
computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
computeEncoder.setTexture(blurredVerticalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
computeEncoder.setTexture(downscaledTexture, index: 0)
computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
return blurredVerticalTexture
})
computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
computeEncoder.setTexture(blurredVerticalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
guard let blurredTexture else {
return
}
let effectiveRect = placement.effectiveRect
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 0.85
var saturation: Float = 1.3
var overlay: SIMD4<Float> = SIMD4<Float>()
encoder.setFragmentBytes(&brightness, length: 4, index: 0)
encoder.setFragmentBytes(&saturation, length: 4, index: 1)
encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
})
}
// context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
// guard let rgbaTexture else {
// return
// }
//
// let effectiveRect = placement.effectiveRect
//
// var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
// encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
//
// var mirror = SIMD2<UInt32>(
// videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
// videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
// )
// encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
//
// encoder.setFragmentTexture(rgbaTexture, index: 0)
//
// var brightness: Float = 1.0
// var saturation: Float = 1.0
// var overlay: SIMD4<Float> = SIMD4<Float>()
// encoder.setFragmentBytes(&brightness, length: 4, index: 0)
// encoder.setFragmentBytes(&saturation, length: 4, index: 1)
// encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
//
// encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
// })
return blurredVerticalTexture
})
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
guard let blurredTexture else {
return
}
let effectiveRect = placement.effectiveRect
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 0.85
var saturation: Float = 1.3
var overlay: SIMD4<Float> = SIMD4<Float>()
encoder.setFragmentBytes(&brightness, length: 4, index: 0)
encoder.setFragmentBytes(&saturation, length: 4, index: 1)
encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
})
}
}
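
The compute dispatch above sizes its grid with the usual ceiling division so 16x16 threadgroups cover the whole downscaled texture before the separable horizontal and vertical blur passes run. A small self-contained sketch of that calculation with plain Metal types:

import Metal

// Rounds the texture size up to whole threadgroups, matching
// (size + threadgroupSize - 1) / threadgroupSize in the code above.
func threadgroupCount(for texture: MTLTexture, threadgroupSize: MTLSize) -> MTLSize {
    return MTLSize(
        width: (texture.width + threadgroupSize.width - 1) / threadgroupSize.width,
        height: (texture.height + threadgroupSize.height - 1) / threadgroupSize.height,
        depth: 1
    )
}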

View File

@ -21,8 +21,8 @@ final class CameraVideoSource: VideoSource {
public init?() {
self.device = MetalEngine.shared.device
self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer in
self?.push(buffer)
self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer, mirror in
self?.push(buffer, mirror: mirror)
})
CVMetalTextureCacheCreate(nil, nil, self.device, nil, &self.textureCache)
@ -41,7 +41,7 @@ final class CameraVideoSource: VideoSource {
}
}
private func push(_ sampleBuffer: CMSampleBuffer) {
private func push(_ sampleBuffer: CMSampleBuffer, mirror: Bool) {
guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
@ -71,7 +71,7 @@ final class CameraVideoSource: VideoSource {
uv: uvTexture
)),
dataBuffer: Output.NativeDataBuffer(pixelBuffer: buffer),
mirrorDirection: [],
mirrorDirection: mirror ? [.vertical] : [],
sourceId: self.sourceId
)

View File

@ -213,7 +213,7 @@ final class CollageIconCarouselComponent: Component {
self.state = state
let inset: CGFloat = 27.0
let spacing: CGFloat = 8.0
let spacing: CGFloat = availableSize.width > 290.0 ? 7.0 : 8.0
var contentWidth: CGFloat = inset
let buttonSize = CGSize(width: 40.0, height: 40.0)
@ -275,7 +275,7 @@ final class CollageIconCarouselComponent: Component {
self.clippingView.frame = CGRect(origin: .zero, size: availableSize)
if self.clippingView.mask == nil {
if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.3, 0.7, 1.0], direction: .horizontal) {
if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.2, 0.8, 1.0], direction: .horizontal) {
let maskView = UIImageView(image: maskImage.stretchableImage(withLeftCapWidth: 13, topCapHeight: 0))
self.clippingView.mask = maskView
}
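
The mask tweak above widens the opaque middle of the horizontal fade (locations move from 0.3 and 0.7 to 0.2 and 0.8), so less of the carousel is dimmed at the edges. A self-contained CAGradientLayer equivalent of that mask, for illustration only; the project builds it with its own generateGradientImage helper:

import UIKit

// clear -> black -> black -> clear across the width; when used as a mask,
// black regions stay visible and clear regions are cut away.
func makeHorizontalFadeMask(frame: CGRect) -> CAGradientLayer {
    let mask = CAGradientLayer()
    mask.frame = frame
    mask.colors = [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear].map { $0.cgColor }
    mask.locations = [0.0, 0.2, 0.8, 1.0]
    mask.startPoint = CGPoint(x: 0.0, y: 0.5)
    mask.endPoint = CGPoint(x: 1.0, y: 0.5)
    return mask
}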

View File

@ -1149,6 +1149,11 @@ public final class MediaEditor {
public func setVideoIsMuted(_ videoIsMuted: Bool) {
self.player?.isMuted = videoIsMuted
if !self.values.collage.isEmpty {
for player in self.additionalPlayers {
player.isMuted = videoIsMuted
}
}
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedVideoIsMuted(videoIsMuted)
}
@ -1857,9 +1862,11 @@ public final class MediaEditor {
public func collageItemIndexForTrackId(_ trackId: Int32) -> Int? {
var collageIndex = -1
var trackIndex = 0
var trackIndex = -1
for item in self.values.collage {
if case .videoFile = item.content {
if case .main = item.content {
trackIndex += 1
} else if case .videoFile = item.content {
trackIndex += 1
} else if case .asset(_, true) = item.content {
trackIndex += 1
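
Two collage fixes in this file: setVideoIsMuted now also mutes the additional players backing collage items, and collageItemIndexForTrackId starts its running track index at -1 while counting the collage's .main content as a track, keeping the indices zero-based. A self-contained sketch of that counting with an illustrative content enum (not the project's real type):

// Track-bearing items (main content, video files, video assets) get consecutive
// zero-based indices; photo items contribute no track.
enum CollageContent { case main, videoFile, videoAsset, photo }

func trackIndices(for items: [CollageContent]) -> [Int?] {
    var trackIndex = -1
    return items.map { item in
        switch item {
        case .main, .videoFile, .videoAsset:
            trackIndex += 1
            return trackIndex
        case .photo:
            return nil
        }
    }
}

// trackIndices(for: [.main, .photo, .videoFile]) == [0, nil, 1]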

View File

@ -216,6 +216,7 @@ public final class MediaEditorVideoExport {
}
var skippingUpdate = false
var initialized = false
}
private var additionalVideoOutput: [Int: VideoOutput] = [:]
@ -761,10 +762,32 @@ public final class MediaEditorVideoExport {
for i in 0 ..< self.additionalVideoOutput.count {
if let additionalVideoOutput = self.additionalVideoOutput[i] {
if let mainTimestamp, mainTimestamp < additionalVideoOutput.startTime {
if !self.configuration.values.collage.isEmpty && !additionalVideoOutput.initialized {
additionalVideoOutput.initialized = true
if case let .videoOutput(videoOutput) = additionalVideoOutput.output {
if let _ = videoOutput.copyNextSampleBuffer(), let sampleBuffer = videoOutput.copyNextSampleBuffer() {
if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
additionalInput.append(.videoBuffer(VideoPixelBuffer(
pixelBuffer: pixelBuffer,
rotation: additionalVideoOutput.textureRotation,
timestamp: .zero
), additionalVideoOutput.rect))
} else {
additionalInput.append(nil)
}
} else {
additionalInput.append(nil)
}
} else {
additionalInput.append(nil)
}
} else {
additionalInput.append(nil)
}
} else {
if additionalVideoOutput.skippingUpdate {
additionalVideoOutput.skippingUpdate = false
additionalInput.append(nil)
} else {
switch additionalVideoOutput.output {
case let .image(image):
@ -787,6 +810,8 @@ public final class MediaEditorVideoExport {
self.statusValue = .progress(Float(progress))
updatedProgress = true
}
} else {
additionalInput.append(nil)
}
if let mainComposeFramerate = self.mainComposeFramerate {
let additionalFrameRate = round(additionalVideoOutput.frameRate / 30.0) * 30.0
@ -794,7 +819,9 @@ public final class MediaEditorVideoExport {
additionalVideoOutput.skippingUpdate = true
}
}
}
} else {
additionalInput.append(nil)
}
}
}
}
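
For collages, the export loop above now primes each additional video output the first time the main timeline is still ahead of that track's start: it pulls a sample buffer from the reader output and submits its pixel buffer at timestamp zero so the compositor has a frame to draw. A minimal sketch of that priming step with plain AVFoundation types; the double copyNextSampleBuffer, which discards the first buffer, mirrors the diff:

import AVFoundation
import CoreMedia

// Returns a pixel buffer from the second readable sample, or nil if two
// samples cannot be copied from the output.
func primeFirstPixelBuffer(from output: AVAssetReaderTrackOutput) -> CVPixelBuffer? {
    guard output.copyNextSampleBuffer() != nil,
          let sampleBuffer = output.copyNextSampleBuffer() else {
        return nil
    }
    return CMSampleBufferGetImageBuffer(sampleBuffer)
}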

View File

@ -3208,8 +3208,18 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
if self.controller?.isEmbeddedEditor == true {
} else {
self.previewContainerView.alpha = 1.0
if CACurrentMediaTime() - self.initializationTimestamp > 0.2, case .image = subject {
if case .videoCollage = subject {
Queue.mainQueue().after(0.7) {
self.previewContainerView.alpha = 1.0
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
})
}
} else if CACurrentMediaTime() - self.initializationTimestamp > 0.2, case .image = subject {
self.previewContainerView.alpha = 1.0
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
@ -3217,6 +3227,7 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
self.backgroundDimView.isHidden = false
})
} else {
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
}
}
@ -7736,8 +7747,53 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
let asset = AVURLAsset(url: NSURL(fileURLWithPath: path) as URL)
exportSubject = .single(.video(asset: asset, isStory: true))
case let .videoCollage(items):
let _ = items
exportSubject = .complete()
var maxDurationItem: (Double, Subject.VideoCollageItem)?
for item in items {
switch item.content {
case .image:
break
case let .video(_, duration):
if let (maxDuration, _) = maxDurationItem {
if duration > maxDuration {
maxDurationItem = (duration, item)
}
} else {
maxDurationItem = (duration, item)
}
case let .asset(asset):
if let (maxDuration, _) = maxDurationItem {
if asset.duration > maxDuration {
maxDurationItem = (asset.duration, item)
}
} else {
maxDurationItem = (asset.duration, item)
}
}
}
guard let (_, mainItem) = maxDurationItem else {
fatalError()
}
let assetSignal: Signal<AVAsset, NoError>
switch mainItem.content {
case let .video(path, _):
assetSignal = .single(AVURLAsset(url: NSURL(fileURLWithPath: path) as URL))
case let .asset(asset):
assetSignal = Signal { subscriber in
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
if let avAsset {
subscriber.putNext(avAsset)
subscriber.putCompletion()
}
}
return EmptyDisposable
}
default:
fatalError()
}
exportSubject = assetSignal
|> map { asset in
return .video(asset: asset, isStory: true)
}
case let .image(image, _, _, _):
exportSubject = .single(.image(image: image))
case let .asset(asset):
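
The new videoCollage branch above picks the longest video item to drive the export, loading it from its file path or, for a library item, through PHImageManager.requestAVAsset. The same selection expressed with compactMap and max(by:) for clarity; duration(of:) is a hypothetical helper standing in for the switch in the diff:

import Photos

// Duration of a collage item, or nil for still images (nothing to compare).
func duration(of item: Subject.VideoCollageItem) -> Double? {
    switch item.content {
    case .image:
        return nil
    case let .video(_, duration):
        return duration
    case let .asset(asset):
        return asset.duration
    }
}

let mainItem = items
    .compactMap { item in duration(of: item).map { ($0, item) } }
    .max(by: { $0.0 < $1.0 })?.1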

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "gift (3).pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}