Video Editing

This commit is contained in:
parent 29b23c767f
commit 7741978b7e
@@ -156,6 +156,7 @@ swift_library(
         ":AppIntentVocabularyResources",
         ":InfoPlistStringResources",
         "//submodules/LegacyComponents:LegacyComponentsResources",
+        "//submodules/LegacyComponents:LegacyComponentsAssets",
         "//submodules/OverlayStatusController:OverlayStatusControllerResources",
         "//submodules/PasswordSetupUI:PasswordSetupUIResources",
         "//submodules/PasswordSetupUI:PasswordSetupUIAssets",
@@ -60,12 +60,12 @@ public enum AnimatedStickerPlaybackMode {
     case still(AnimatedStickerPlaybackPosition)
 }
 
-private final class AnimatedStickerFrame {
-    let data: Data
-    let type: AnimationRendererFrameType
-    let width: Int
-    let height: Int
-    let bytesPerRow: Int
+public final class AnimatedStickerFrame {
+    public let data: Data
+    public let type: AnimationRendererFrameType
+    public let width: Int
+    public let height: Int
+    public let bytesPerRow: Int
     let index: Int
     let isLastFrame: Bool
 
@@ -80,7 +80,7 @@ private final class AnimatedStickerFrame {
     }
 }
 
-private protocol AnimatedStickerFrameSource: class {
+public protocol AnimatedStickerFrameSource: class {
     var frameRate: Int { get }
     var frameCount: Int { get }
 
@@ -97,7 +97,7 @@ private final class AnimatedStickerFrameSourceWrapper {
 }
 
 @available(iOS 9.0, *)
-private final class AnimatedStickerCachedFrameSource: AnimatedStickerFrameSource {
+public final class AnimatedStickerCachedFrameSource: AnimatedStickerFrameSource {
     private let queue: Queue
     private var data: Data
     private var dataComplete: Bool
@@ -107,15 +107,15 @@ private final class AnimatedStickerCachedFrameSource: AnimatedStickerFrameSource
     let width: Int
     let bytesPerRow: Int
     let height: Int
-    let frameRate: Int
-    let frameCount: Int
+    public let frameRate: Int
+    public let frameCount: Int
     private var frameIndex: Int
     private let initialOffset: Int
     private var offset: Int
     var decodeBuffer: Data
     var frameBuffer: Data
 
-    init?(queue: Queue, data: Data, complete: Bool, notifyUpdated: @escaping () -> Void) {
+    public init?(queue: Queue, data: Data, complete: Bool, notifyUpdated: @escaping () -> Void) {
         self.queue = queue
         self.data = data
         self.dataComplete = complete
@@ -179,7 +179,7 @@ private final class AnimatedStickerCachedFrameSource: AnimatedStickerFrameSource
         assert(self.queue.isCurrent())
     }
 
-    func takeFrame() -> AnimatedStickerFrame? {
+    public func takeFrame() -> AnimatedStickerFrame? {
         var frameData: Data?
         var isLastFrame = false
 
@@ -259,7 +259,7 @@ private final class AnimatedStickerCachedFrameSource: AnimatedStickerFrameSource
         self.dataComplete = complete
     }
 
-    func skipToEnd() {
+    public func skipToEnd() {
     }
 }
 
@@ -310,13 +310,13 @@ private final class AnimatedStickerDirectFrameSource: AnimatedStickerFrameSource
     }
 }
 
-private final class AnimatedStickerFrameQueue {
+public final class AnimatedStickerFrameQueue {
     private let queue: Queue
     private let length: Int
     private let source: AnimatedStickerFrameSource
     private var frames: [AnimatedStickerFrame] = []
 
-    init(queue: Queue, length: Int, source: AnimatedStickerFrameSource) {
+    public init(queue: Queue, length: Int, source: AnimatedStickerFrameSource) {
         self.queue = queue
         self.length = length
         self.source = source
@@ -326,7 +326,7 @@ private final class AnimatedStickerFrameQueue {
         assert(self.queue.isCurrent())
     }
 
-    func take() -> AnimatedStickerFrame? {
+    public func take() -> AnimatedStickerFrame? {
         if self.frames.isEmpty {
             if let frame = self.source.takeFrame() {
                 self.frames.append(frame)
@@ -340,7 +340,7 @@ private final class AnimatedStickerFrameQueue {
         }
     }
 
-    func generateFramesIfNeeded() {
+    public func generateFramesIfNeeded() {
         if self.frames.isEmpty {
             if let frame = self.source.takeFrame() {
                 self.frames.append(frame)
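The hunks above only change visibility: the cached-frame pipeline (frame, frame source, frame queue) becomes usable outside this file. A minimal sketch of how a caller might drive it, assuming SwiftSignalKit's Queue (which these classes already use) and that the module defining the AnimatedSticker* types is imported (that import is omitted here because the module name is not shown in this diff):

    import SwiftSignalKit

    // Decode the first frame of a fully cached animation blob off the main thread.
    // AnimatedStickerCachedFrameSource, AnimatedStickerFrameQueue and AnimatedStickerFrame
    // are the types made public in this commit; everything else is an assumption.
    func decodeFirstFrame(data: Data, queue: Queue, completion: @escaping (AnimatedStickerFrame?) -> Void) {
        queue.async {
            guard let source = AnimatedStickerCachedFrameSource(queue: queue, data: data, complete: true, notifyUpdated: {}) else {
                completion(nil)
                return
            }
            // The frame queue pre-decodes up to `length` frames ahead of the renderer.
            let frameQueue = AnimatedStickerFrameQueue(queue: queue, length: 1, source: source)
            completion(frameQueue.take())
        }
    }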
@@ -2,7 +2,7 @@ import Foundation
 import SwiftSignalKit
 import AsyncDisplayKit
 
-enum AnimationRendererFrameType {
+public enum AnimationRendererFrameType {
     case argb
     case yuva
 }
@@ -7,6 +7,13 @@ filegroup(
     visibility = ["//visibility:public"],
 )
 
+filegroup(
+    name = "LegacyComponentsAssets",
+    srcs = glob(["LegacyImages.xcassets/**"]),
+    visibility = ["//visibility:public"],
+)
+
+
 objc_library(
     name = "LegacyComponents",
     enable_modules = True,
@@ -0,0 +1,6 @@
+{
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}
New asset catalog files (all vendored) under submodules/LegacyComponents/LegacyImages.xcassets/Editor/:

Each <name>.imageset/Contents.json is a new 12-line manifest of the form
    { "images" : [ { "filename" : "<pdf>", "idiom" : "universal" } ], "info" : { "author" : "xcode", "version" : 1 } }
and each referenced PDF is added as a new binary file (binary contents not shown):

    AddSticker.imageset  -> ic_editor_addsticker.pdf
    AddText.imageset     -> ic_editor_addtext.pdf
    Adjustments.imageset -> ic_editor_tools.pdf
    AspectRatio.imageset -> ic_editor_frame.pdf
    Blur.imageset        -> ic_editor_blur.pdf
    Brush.imageset       -> ic_editor_brushtype.pdf
    Cancel.imageset      -> ic_editor_close (2).pdf
    Commit.imageset      -> ic_editor_check (2).pdf
    Crop.imageset        -> ic_editor_crop.pdf
    Curves.imageset      -> ic_editor_curves.pdf
    Drawing.imageset     -> ic_editor_brush.pdf
    Eraser.imageset      -> ic_editor_eracer.pdf
    Eyedropper.imageset  -> ic_editor_eyedropper.pdf
    Flip.imageset        -> ic_editor_flip.pdf
    Font.imageset        -> ic_editor_font.pdf
    Mute.imageset        -> ic_editor_muted.pdf
    Play.imageset        -> ic_editor_play.pdf
    Recipient.imageset   -> send.pdf
    Rotate.imageset      -> ic_editor_rotate.pdf
    Tint.imageset        -> ic_editor_tint.pdf
    Undo.imageset        -> ic_editor_undo.pdf
    Unmute.imageset      -> ic_editor_unmuted.pdf

A new 9-line folder Contents.json (its path is not shown on this page) enables namespacing for the group:
    { "info" : { "author" : "xcode", "version" : 1 }, "properties" : { "provides-namespace" : true } }
@@ -264,6 +264,8 @@
 #import <LegacyComponents/TGPhotoMaskPosition.h>
 #import <LegacyComponents/TGPhotoPaintEntity.h>
 #import <LegacyComponents/TGPhotoPaintStickerEntity.h>
+#import <LegacyComponents/TGPhotoPaintTextEntity.h>
+#import <LegacyComponents/TGPhotoPaintStickersContext.h>
 #import <LegacyComponents/TGPhotoToolbarView.h>
 #import <LegacyComponents/TGPhotoVideoEditor.h>
 #import <LegacyComponents/TGPluralization.h>
@@ -10,6 +10,7 @@
 @class TGViewController;
 @class TGAttachmentCameraView;
 @protocol TGModernGalleryTransitionHostScrollView;
+@protocol TGPhotoPaintStickersContext;
 
 @interface TGAttachmentCarouselCollectionView : UICollectionView
 
@@ -22,6 +23,7 @@
 @property (nonatomic, readonly) TGMediaSelectionContext *selectionContext;
 @property (nonatomic, readonly) TGMediaEditingContext *editingContext;
 @property (nonatomic, strong) TGSuggestionContext *suggestionContext;
+@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;
 @property (nonatomic) bool allowCaptions;
 @property (nonatomic) bool allowCaptionEntities;
 @property (nonatomic) bool inhibitDocumentCaptions;
@@ -9,6 +9,8 @@
 @class TGMediaAssetsPickerController;
 @class TGViewController;
 
+@protocol TGPhotoPaintStickersContext;
+
 typedef enum
 {
     TGMediaAssetsControllerSendMediaIntent,
@@ -49,6 +51,7 @@ typedef enum
 @property (nonatomic, readonly) TGMediaEditingContext *editingContext;
 @property (nonatomic, readonly) TGMediaSelectionContext *selectionContext;
 @property (nonatomic, strong) TGSuggestionContext *suggestionContext;
+@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;
 @property (nonatomic, assign) bool localMediaCacheEnabled;
 @property (nonatomic, assign) bool captionsEnabled;
 @property (nonatomic, assign) bool allowCaptionEntities;
@@ -81,7 +81,8 @@
 - (SSignal *)timersUpdatedSignal;
 
 - (UIImage *)paintingImageForItem:(NSObject<TGMediaEditableItem> *)item;
-- (bool)setPaintingData:(NSData *)data image:(UIImage *)image forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video;
+- (UIImage *)stillPaintingImageForItem:(NSObject<TGMediaEditableItem> *)item;
+- (bool)setPaintingData:(NSData *)data image:(UIImage *)image stillImage:(UIImage *)image forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video;
 - (void)clearPaintingData;
 
 - (SSignal *)facesForItem:(NSObject<TGMediaEditableItem> *)item;
@@ -8,6 +8,8 @@
 @class TGMediaPickerSelectionGestureRecognizer;
 @class TGMediaAssetsPallete;
 
+@protocol TGPhotoPaintStickersContext;
+
 @interface TGMediaPickerController : TGViewController <UICollectionViewDataSource, UICollectionViewDelegate, UICollectionViewDelegateFlowLayout>
 {
     TGMediaPickerLayoutMetrics *_layoutMetrics;
@@ -18,6 +20,7 @@
 }
 
 @property (nonatomic, strong) TGSuggestionContext *suggestionContext;
+@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;
 @property (nonatomic, assign) bool localMediaCacheEnabled;
 @property (nonatomic, assign) bool captionsEnabled;
 @property (nonatomic, assign) bool allowCaptionEntities;
@@ -15,6 +15,8 @@
 @class TGMediaSelectionContext;
 @protocol TGMediaSelectableItem;
 
+@protocol TGPhotoPaintStickersContext;
+
 @class TGSuggestionContext;
 
 @interface TGMediaPickerGalleryModel : TGModernGalleryModel
@@ -44,6 +46,7 @@
 
 @property (nonatomic, readonly) TGMediaSelectionContext *selectionContext;
 @property (nonatomic, strong) TGSuggestionContext *suggestionContext;
+@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;
 
 - (instancetype)initWithContext:(id<LegacyComponentsContext>)context items:(NSArray *)items focusItem:(id<TGModernGalleryItem>)focusItem selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions hasSelectionPanel:(bool)hasSelectionPanel hasCamera:(bool)hasCamera recipientName:(NSString *)recipientName;
 
@@ -11,6 +11,8 @@
 @class TGMediaAssetFetchResult;
 @class TGMediaAssetMomentList;
 
+@protocol TGPhotoPaintStickersContext;
+
 @interface TGMediaPickerModernGalleryMixin : NSObject
 
 @property (nonatomic, weak, readonly) TGMediaPickerGalleryModel *galleryModel;
@@ -29,9 +31,9 @@
 
 @property (nonatomic, copy) void (^presentScheduleController)(void (^)(int32_t));
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder;
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext;
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder;
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext;
 
 - (void)present;
 - (void)updateWithFetchResult:(TGMediaAssetFetchResult *)fetchResult;
@@ -12,11 +12,12 @@
 
 @end
 
+@protocol TGPhotoPaintEntityRenderer;
+
 @interface TGMediaVideoConverter : NSObject
 
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher;
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio;
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
 + (SSignal *)hashForAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments;
 
 + (NSUInteger)estimatedSizeForPreset:(TGMediaVideoConversionPreset)preset duration:(NSTimeInterval)duration hasAudio:(bool)hasAudio;
@@ -7,4 +7,6 @@
 @property (nonatomic, copy) void (^singleTapped)();
 @property (nonatomic, copy) void (^doubleTapped)(CGPoint point);
 
+- (instancetype)initWithFrame:(CGRect)frame hasDoubleTap:(bool)hasDoubleTap;
+
 @end
@@ -16,7 +16,11 @@
 @property (nonatomic, readonly) NSData *data;
 @property (nonatomic, readonly) UIImage *image;
 
-+ (instancetype)dataWithPaintingData:(NSData *)data image:(UIImage *)image entities:(NSArray *)entities undoManager:(TGPaintUndoManager *)undoManager;
+@property (nonatomic, readonly) UIImage *stillImage;
+
++ (instancetype)dataWithPaintingData:(NSData *)data image:(UIImage *)image stillImage:(UIImage *)stillImage entities:(NSArray *)entities undoManager:(TGPaintUndoManager *)undoManager;
+
++ (instancetype)dataWithPaintingImagePath:(NSString *)imagePath entities:(NSArray *)entities;
 
 + (instancetype)dataWithPaintingImagePath:(NSString *)imagePath;
 
@@ -11,6 +11,8 @@
 @class TGSuggestionContext;
 @class TGPhotoEditorController;
 
+@protocol TGPhotoPaintStickersContext;
+
 typedef enum {
     TGPhotoEditorControllerGenericIntent = 0,
     TGPhotoEditorControllerAvatarIntent = (1 << 0),
@@ -24,6 +26,7 @@ typedef enum {
 
 @property (nonatomic, strong) TGSuggestionContext *suggestionContext;
 @property (nonatomic, strong) TGMediaEditingContext *editingContext;
+@property (nonatomic, strong) id<TGPhotoPaintStickersContext> stickersContext;
 
 @property (nonatomic, copy) UIView *(^beginTransitionIn)(CGRect *referenceFrame, UIView **parentView);
 @property (nonatomic, copy) void (^finishedTransitionIn)(void);
@@ -18,7 +18,6 @@
 
 + (UIColor *)editorButtonSelectionBackgroundColor;
 
-+ (UIImage *)captionIcon;
 + (UIImage *)cropIcon;
 + (UIImage *)toolsIcon;
 + (UIImage *)rotateIcon;
@@ -7,6 +7,7 @@
 }
 
 @property (nonatomic, assign) NSInteger uuid;
+@property (nonatomic, readonly) bool animated;
 @property (nonatomic, assign) CGPoint position;
 @property (nonatomic, assign) CGFloat angle;
 @property (nonatomic, assign) CGFloat scale;
@@ -4,11 +4,11 @@
 
 @interface TGPhotoPaintStickerEntity : TGPhotoPaintEntity
 
-@property (nonatomic, readonly) TGDocumentMediaAttachment *document;
+@property (nonatomic, readonly) NSData *document;
 @property (nonatomic, readonly) NSString *emoji;
 @property (nonatomic, readonly) CGSize baseSize;
 
-- (instancetype)initWithDocument:(TGDocumentMediaAttachment *)document baseSize:(CGSize)baseSize;
+- (instancetype)initWithDocument:(id)document baseSize:(CGSize)baseSize animated:(bool)animated;
 - (instancetype)initWithEmoji:(NSString *)emoji;
 
 @end
@@ -0,0 +1,25 @@
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+#import <CoreMedia/CoreMedia.h>
+
+@class TGPaintingData;
+
+@protocol TGPhotoPaintEntityRenderer <NSObject>
+
+- (void)entitiesForTime:(CMTime)time size:(CGSize)size completion:(void(^)(NSArray<CIImage *> *))completion;
+
+@end
+
+@protocol TGPhotoPaintStickerRenderView <NSObject>
+
+- (UIImage *)image;
+
+@end
+
+@protocol TGPhotoPaintStickersContext <NSObject>
+
+- (UIView<TGPhotoPaintStickerRenderView> *)stickerViewForDocument:(id)document;
+
+@property (nonatomic, copy) void(^presentStickersController)(void(^)(id, bool, UIView *, CGRect));
+
+@end
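This new header is the bridge through which the Swift app injects sticker and text rendering into the legacy editor and video converter. An illustrative Swift analogue of the three protocols (not the bridged Objective-C API itself; all Swift names below are mine, and the interpretation of the callback parameters is an assumption), just to make the contract easier to read:

    import UIKit
    import CoreImage
    import CoreMedia

    // Mirrors TGPhotoPaintEntityRenderer: during export, the converter asks for the
    // overlay entities (stickers, text) to composite at a given timestamp and render size.
    protocol PaintEntityRenderer: AnyObject {
        func entities(for time: CMTime, size: CGSize, completion: @escaping ([CIImage]) -> Void)
    }

    // Mirrors TGPhotoPaintStickerRenderView: a live sticker view that can snapshot itself.
    protocol PaintStickerRenderView: AnyObject {
        func image() -> UIImage
    }

    // Mirrors TGPhotoPaintStickersContext: builds a view for a sticker document and exposes
    // a callback for presenting the sticker picker. The picker callback receives an id, a
    // bool, a UIView and a CGRect; the bool most plausibly flags animated stickers.
    protocol PaintStickersContext: AnyObject {
        func stickerView(forDocument document: Any) -> UIView & PaintStickerRenderView
        var presentStickersController: (((Any, Bool, UIView, CGRect) -> Void) -> Void)? { get set }
    }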
@@ -1,6 +1,7 @@
 #import <LegacyComponents/TGPhotoPaintEntity.h>
-#import "TGPaintSwatch.h"
-#import "TGPhotoPaintFont.h"
+
+@class TGPaintSwatch;
+@class TGPhotoPaintFont;
 
 @interface TGPhotoPaintTextEntity : TGPhotoPaintEntity
 
@@ -11,6 +12,8 @@
 @property (nonatomic, assign) CGFloat maxWidth;
 @property (nonatomic, assign) bool stroke;
 
+@property (nonatomic, strong) UIImage *renderImage;
+
 - (instancetype)initWithText:(NSString *)text font:(TGPhotoPaintFont *)font swatch:(TGPaintSwatch *)swatch baseFontSize:(CGFloat)baseFontSize maxWidth:(CGFloat)maxWidth stroke:(bool)stroke;
 
 @end
@@ -3,9 +3,9 @@
 typedef NS_OPTIONS(NSUInteger, TGPhotoEditorTab) {
     TGPhotoEditorNoneTab = 0,
     TGPhotoEditorCropTab = 1 << 0,
-    TGPhotoEditorStickerTab = 1 << 1,
-    TGPhotoEditorPaintTab = 1 << 2,
-    TGPhotoEditorEraserTab = 1 << 3,
+    TGPhotoEditorPaintTab = 1 << 1,
+    TGPhotoEditorEraserTab = 1 << 2,
+    TGPhotoEditorStickerTab = 1 << 3,
     TGPhotoEditorTextTab = 1 << 4,
     TGPhotoEditorToolsTab = 1 << 5,
     TGPhotoEditorRotateTab = 1 << 6,
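The hunk above reorders the editor tab bit flags: paint and eraser each move down one bit and sticker moves to 1 << 3. A standalone Swift OptionSet mirroring the new layout, purely to make the reassignment explicit (the real type is the NS_OPTIONS enum above, which Swift imports automatically):

    struct PhotoEditorTab: OptionSet {
        let rawValue: UInt

        static let crop    = PhotoEditorTab(rawValue: 1 << 0)
        static let paint   = PhotoEditorTab(rawValue: 1 << 1) // was 1 << 2
        static let eraser  = PhotoEditorTab(rawValue: 1 << 2) // was 1 << 3
        static let sticker = PhotoEditorTab(rawValue: 1 << 3) // was 1 << 1
        static let text    = PhotoEditorTab(rawValue: 1 << 4)
        static let tools   = PhotoEditorTab(rawValue: 1 << 5)
        static let rotate  = PhotoEditorTab(rawValue: 1 << 6)
    }

Note that any raw tab values hard-coded against the old bit positions change meaning after this reordering.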
@@ -57,4 +57,6 @@ typedef struct GPUTextureOptions {
 - (NSUInteger)bytesPerRow;
 - (GLubyte *)byteBuffer;
 
++ (void)setMark:(BOOL)mark;
+
 @end
@@ -33,13 +33,18 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
 #pragma mark -
 #pragma mark Initialization and teardown
 
+static BOOL mark = false;
++ (void)setMark:(BOOL)mark_ {
+    mark = mark_;
+}
+
 - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture
 {
     if (!(self = [super init]))
     {
         return nil;
     }
+    _mark = mark;
     _textureOptions = fboTextureOptions;
     _size = framebufferSize;
     framebufferReferenceCount = 0;
@@ -67,7 +72,7 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
     {
         return nil;
     }
+    _mark = mark;
     GPUTextureOptions defaultTextureOptions;
     defaultTextureOptions.minFilter = GL_LINEAR;
     defaultTextureOptions.magFilter = GL_LINEAR;
@@ -89,6 +94,7 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
 
 - (id)initWithSize:(CGSize)framebufferSize
 {
+    _mark = mark;
     GPUTextureOptions defaultTextureOptions;
     defaultTextureOptions.minFilter = GL_LINEAR;
     defaultTextureOptions.magFilter = GL_LINEAR;
@@ -136,10 +142,8 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
     glGenFramebuffers(1, &framebuffer);
     glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
 
-    // By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache
     if ([GPUImageContext supportsFastTextureUpload])
     {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
         CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];
         // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
 
@@ -180,7 +184,6 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
         glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);
 
         glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
-#endif
     }
     else
     {
@@ -275,8 +278,10 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size);
     if (framebufferReferenceCount < 1)
     {
         [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
-    } else if (framebufferReferenceCount == 1) {
-        fixer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.3 repeat:false];
+        [fixer invalidate];
+        fixer = nil;
+    } else if (framebufferReferenceCount == 1 && self.mark) {
+        fixer = [TGTimerTarget scheduledMainThreadTimerWithTarget:self action:@selector(fixTick) interval:0.35 repeat:false];
     }
 }
 
@@ -336,7 +341,6 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
     CGDataProviderRef dataProvider = NULL;
     if ([GPUImageContext supportsFastTextureUpload])
     {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
         NSUInteger paddedWidthOfImage = (NSUInteger)(CVPixelBufferGetBytesPerRow(renderTarget) / 4.0);
         NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
 
@@ -346,8 +350,6 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
         rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
         dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
         [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
-#else
-#endif
     }
     else
     {
@@ -362,10 +364,7 @@ void dataProviderUnlockCallback (void *info, __unused const void *data, __unused
 
     if ([GPUImageContext supportsFastTextureUpload])
     {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
         cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-#else
-#endif
     }
     else
     {
@@ -94,7 +94,6 @@
     {
         // Nothing in the cache, create a new framebuffer to use
         framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
-        framebufferFromCache.mark = mark;
     }
     else
     {
@@ -120,7 +119,6 @@
         if (framebufferFromCache == nil)
         {
             framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
-            framebufferFromCache.mark = mark;
         }
     }
 });
@@ -1,21 +1,7 @@
 #import "GPUImageContext.h"
 #import "GPUImageFramebuffer.h"
 
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
 #import <UIKit/UIKit.h>
-#else
-// For now, just redefine this on the Mac
-typedef NS_ENUM(NSInteger, UIImageOrientation) {
-    UIImageOrientationUp,            // default orientation
-    UIImageOrientationDown,          // 180 deg rotation
-    UIImageOrientationLeft,          // 90 deg CCW
-    UIImageOrientationRight,         // 90 deg CW
-    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip
-    UIImageOrientationDownMirrored,  // horizontal flip
-    UIImageOrientationLeftMirrored,  // vertical flip
-    UIImageOrientationRightMirrored, // vertical flip
-};
-#endif
 
 void runOnMainQueueWithoutDeadlocking(void (^block)(void));
 void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
@@ -24,17 +10,6 @@ void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void
 void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
 void reportAvailableMemoryForGPUImage(NSString *tag);
 
-/** GPUImage's base source object
- 
- Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
- 
- - GPUImageVideoCamera (for live video from an iOS camera)
- - GPUImageStillCamera (for taking photos with the camera)
- - GPUImagePicture (for still images)
- - GPUImageMovie (for movies)
- 
- Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
- */
 @interface GPUImageOutput : NSObject
 {
     GPUImageFramebuffer *outputFramebuffer;
@@ -56,7 +31,6 @@ void reportAvailableMemoryForGPUImage(NSString *tag);
 @property(nonatomic) BOOL enabled;
 @property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;
 
-/// @name Managing targets
 - (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
 - (GPUImageFramebuffer *)framebufferForOutput;
 - (void)removeOutputFramebuffer;
@ -106,15 +80,8 @@ void reportAvailableMemoryForGPUImage(NSString *tag);
|
|||||||
- (void)useNextFrameForImageCapture;
|
- (void)useNextFrameForImageCapture;
|
||||||
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
|
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
|
||||||
|
|
||||||
// Platform-specific image output methods
|
|
||||||
// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
|
|
||||||
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
|
|
||||||
- (UIImage *)imageFromCurrentFramebuffer;
|
- (UIImage *)imageFromCurrentFramebuffer;
|
||||||
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
|
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
|
||||||
#else
|
|
||||||
- (NSImage *)imageFromCurrentFramebuffer;
|
|
||||||
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
|
|
||||||
#endif
|
|
||||||
|
|
||||||
- (BOOL)providesMonochromeOutput;
|
- (BOOL)providesMonochromeOutput;
|
||||||
|
|
||||||
16  submodules/LegacyComponents/Sources/GPUImageTextureInput.h  Executable file
@@ -0,0 +1,16 @@
+#import "GPUImageOutput.h"
+#import <CoreImage/CoreImage.h>
+
+@interface GPUImageTextureInput : GPUImageOutput
+{
+    CGSize textureSize;
+}
+
+- (instancetype)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
+- (instancetype)initWithCIImage:(CIImage *)ciImage;
+
+- (void)processTextureWithFrameTime:(CMTime)frameTime synchronous:(bool)synchronous;
+
+- (CGSize)textureSize;
+
+@end
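The header above is the whole public surface of the new input class. A minimal usage sketch based only on these declarations (the target filter is a placeholder for any GPUImageInput-conforming object, and error handling is omitted):

    // Wrap an existing CIImage as a GPUImage source and push a single frame through the chain.
    GPUImageTextureInput *textureInput = [[GPUImageTextureInput alloc] initWithCIImage:ciImage];
    [textureInput addTarget:someFilter]; // someFilter: assumed GPUImageOutput<GPUImageInput> subclass
    [textureInput processTextureWithFrameTime:kCMTimeZero synchronous:true];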
94  submodules/LegacyComponents/Sources/GPUImageTextureInput.m  Executable file
@@ -0,0 +1,94 @@
+#import "GPUImageTextureInput.h"
+
+@implementation GPUImageTextureInput
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (instancetype)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize
+{
+    if (!(self = [super init]))
+    {
+        return nil;
+    }
+    
+    runSynchronouslyOnVideoProcessingQueue(^{
+        [GPUImageContext useImageProcessingContext];
+    });
+    
+    textureSize = newTextureSize;
+    
+    runSynchronouslyOnVideoProcessingQueue(^{
+        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];
+    });
+    
+    return self;
+}
+
+- (instancetype)initWithCIImage:(CIImage *)ciImage
+{
+    EAGLContext *context = [[GPUImageContext sharedImageProcessingContext] context];
+    [EAGLContext setCurrentContext:[[GPUImageContext sharedImageProcessingContext] context]];
+    
+    GLsizei backingWidth = ciImage.extent.size.width;
+    GLsizei backingHeight = ciImage.extent.size.height;
+    GLuint outputTexture, defaultFramebuffer;
+    
+    glActiveTexture(GL_TEXTURE0);
+    glGenTextures(1, &outputTexture);
+    glBindTexture(GL_TEXTURE_2D, outputTexture);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glBindTexture(GL_TEXTURE_2D, 0);
+    
+    glActiveTexture(GL_TEXTURE1);
+    glGenFramebuffers(1, &defaultFramebuffer);
+    glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
+    
+    glBindTexture(GL_TEXTURE_2D, outputTexture);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, backingWidth, backingHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
+    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, outputTexture, 0);
+    
+    NSAssert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", glCheckFramebufferStatus(GL_FRAMEBUFFER));
+    
+    glBindTexture(GL_TEXTURE_2D, 0);
+    
+    ciImage = [ciImage imageByApplyingTransform:CGAffineTransformConcat(CGAffineTransformMakeScale(1.0f, -1.0f), CGAffineTransformMakeTranslation(0.0f, ciImage.extent.size.height))];
+    
+    CIContext *ciContext = [CIContext contextWithEAGLContext:context options:@{kCIContextWorkingColorSpace: [NSNull null]}];
+    [ciContext drawImage:ciImage inRect:ciImage.extent fromRect:ciImage.extent];
+    
+    if (self = [self initWithTexture:outputTexture size:ciImage.extent.size]) {
+        textureSize = ciImage.extent.size;
+    }
+    return self;
+}
+
+- (void)processTextureWithFrameTime:(CMTime)frameTime synchronous:(bool)synchronous
+{
+    void (^block)(void) = ^
+    {
+        for (id<GPUImageInput> currentTarget in targets)
+        {
+            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
+            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
+            
+            [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
+            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
+            [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
+        }
+    };
+    
+    if (synchronous)
+        runSynchronouslyOnVideoProcessingQueue(block);
+    else
+        runAsynchronouslyOnVideoProcessingQueue(block);
+}
+
+- (CGSize)textureSize {
+    return textureSize;
+}
+
+@end
24  submodules/LegacyComponents/Sources/GPUImageTextureOutput.h  Executable file
@@ -0,0 +1,24 @@
+#import <Foundation/Foundation.h>
+#import <CoreImage/CoreImage.h>
+#import "GPUImageContext.h"
+
+@protocol GPUImageTextureOutputDelegate;
+
+@interface GPUImageTextureOutput : NSObject <GPUImageInput>
+{
+    GPUImageFramebuffer *firstInputFramebuffer;
+}
+
+@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
+@property(readonly) GLuint texture;
+@property(nonatomic) BOOL enabled;
+
+- (CIImage *)CIImageWithSize:(CGSize)size;
+
+- (void)doneWithTexture;
+
+@end
+
+@protocol GPUImageTextureOutputDelegate
+- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
+@end
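A corresponding sketch for the output side, assuming a filter chain whose final filter already exists; the size passed to CIImageWithSize: is illustrative:

    // Attach the texture output as the last target and read the rendered frame back as a CIImage.
    GPUImageTextureOutput *textureOutput = [[GPUImageTextureOutput alloc] init];
    [finalFilter addTarget:textureOutput]; // finalFilter: assumed last GPUImageOutput in the chain
    // ... after a frame has been processed ...
    CIImage *result = [textureOutput CIImageWithSize:CGSizeMake(1920.0, 1080.0)];
    [textureOutput doneWithTexture]; // unlock the input framebuffer once the CIImage has been consumed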
91  submodules/LegacyComponents/Sources/GPUImageTextureOutput.m  Executable file
@@ -0,0 +1,91 @@
+#import "GPUImageTextureOutput.h"
+
+@implementation GPUImageTextureOutput
+
+@synthesize delegate = _delegate;
+@synthesize texture = _texture;
+@synthesize enabled;
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)init;
+{
+    if (!(self = [super init]))
+    {
+        return nil;
+    }
+    
+    self.enabled = YES;
+    
+    return self;
+}
+
+- (void)doneWithTexture;
+{
+    [firstInputFramebuffer unlock];
+}
+
+#pragma mark -
+#pragma mark GPUImageInput protocol
+
+- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
+{
+    [_delegate newFrameReadyFromTextureOutput:self];
+}
+
+- (NSInteger)nextAvailableTextureIndex;
+{
+    return 0;
+}
+
+- (CIImage *)CIImageWithSize:(CGSize)size
+{
+    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
+    CIImage *image = [[CIImage alloc] initWithTexture:self.texture size:size flipped:true colorSpace:colorSpace];
+    CGColorSpaceRelease(colorSpace);
+    return image;
+}
+
+// TODO: Deal with the fact that the texture changes regularly as a result of the caching
+- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
+{
+    firstInputFramebuffer = newInputFramebuffer;
+    [firstInputFramebuffer lock];
+    
+    _texture = [firstInputFramebuffer texture];
+}
+
+- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
+{
+}
+
+- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
+{
+}
+
+- (CGSize)maximumOutputSize;
+{
+    return CGSizeZero;
+}
+
+- (void)endProcessing
+{
+}
+
+- (BOOL)shouldIgnoreUpdatesToThisTarget;
+{
+    return NO;
+}
+
+- (BOOL)wantsMonochromeInput;
+{
+    return NO;
+}
+
+- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
+{
+    
+}
+
+@end
@@ -152,5 +152,9 @@ UIImage *TGComponentsImageNamed(NSString *name) {
 }
 
 NSString *TGComponentsPathForResource(NSString *name, NSString *type) {
-    return [resourcesBundle() pathForResource:name ofType:type];
+    NSBundle *bundle = resourcesBundle();
+    if (bundle == nil) {
+        bundle = getAppBundle();
+    }
+    return [bundle pathForResource:name ofType:type];
 }
@@ -37,11 +37,13 @@
 
 - (void)setImage:(UIImage *)image forCropRect:(CGRect)cropRect cropRotation:(CGFloat)cropRotation cropOrientation:(UIImageOrientation)cropOrientation cropMirrored:(bool)cropMirrored fullSize:(bool)fullSize;
 - (void)setVideoAsset:(AVAsset *)asset;
+- (void)setCIImage:(CIImage *)ciImage;
 
 - (void)processAnimated:(bool)animated completion:(void (^)(void))completion;
 
 - (void)createResultImageWithCompletion:(void (^)(UIImage *image))completion;
 - (UIImage *)currentResultImage;
+- (CIImage *)currentResultCIImage;
 
 - (bool)hasDefaultCropping;
 
@@ -10,6 +10,9 @@
 #import "PGPhotoEditorView.h"
 #import "PGPhotoEditorPicture.h"
 
+#import "GPUImageTextureInput.h"
+#import "GPUImageTextureOutput.h"
+
 #import <LegacyComponents/PGPhotoEditorValues.h>
 #import <LegacyComponents/TGVideoEditAdjustments.h>
 #import <LegacyComponents/TGPaintingData.h>
@@ -44,6 +47,8 @@
     NSArray *_currentProcessChain;
     GPUImageOutput <GPUImageInput> *_finalFilter;
     
+    GPUImageTextureOutput *_textureOutput;
+    
     PGPhotoHistogram *_currentHistogram;
     PGPhotoHistogramGenerator *_histogramGenerator;
     
@@ -167,6 +172,21 @@
     _fullSize = true;
 }
 
+- (void)setCIImage:(CIImage *)ciImage {
+    [_toolComposer invalidate];
+    _currentProcessChain = nil;
+    
+    [_currentInput removeAllTargets];
+    GPUImageTextureInput *input = [[GPUImageTextureInput alloc] initWithCIImage:ciImage];
+    _currentInput = input;
+    
+    if (_textureOutput == nil) {
+        _textureOutput = [[GPUImageTextureOutput alloc] init];
+    }
+    
+    _fullSize = true;
+}
+
 #pragma mark - Properties
 
 - (CGSize)rotatedCropSize
@@ -201,7 +221,7 @@
 
 - (void)processAnimated:(bool)animated capture:(bool)capture synchronous:(bool)synchronous completion:(void (^)(void))completion
 {
-    if (self.previewOutput == nil)
+    if (self.previewOutput == nil && ![_currentInput isKindOfClass:[GPUImageTextureInput class]])
         return;
     
     if (self.forVideo) {
@@ -210,7 +230,7 @@
             [self updateProcessChain];
             
             GPUImageOutput *currentInput = _currentInput;
+            if ([currentInput isKindOfClass:[PGVideoMovie class]]) {
                 if (!_playing) {
                     _playing = true;
                     [_videoQueue dispatch:^{
@@ -219,7 +239,12 @@
                         }
                     }];
                 }
-        }];
+            } else if ([currentInput isKindOfClass:[GPUImageTextureInput class]]) {
+                [(GPUImageTextureInput *)currentInput processTextureWithFrameTime:kCMTimeZero synchronous:synchronous];
+                if (completion != nil)
+                    completion();
+            }
+        } synchronous:synchronous];
         return;
     }
 
@@ -281,6 +306,8 @@
 }
 
 - (void)updateProcessChain {
+    [GPUImageFramebuffer setMark:self.forVideo];
+    
     NSMutableArray *processChain = [NSMutableArray array];
     
     for (PGPhotoTool *tool in _toolComposer.advancedTools)
@@ -319,12 +346,19 @@
         }
         _finalFilter = lastFilter;
         
-        [_finalFilter addTarget:previewOutput.imageView];
+        if (_textureOutput != nil) {
+            [_finalFilter addTarget:_textureOutput];
+        }
         
-        if (!self.forVideo)
+        if (previewOutput != nil) {
+            [_finalFilter addTarget:previewOutput.imageView];
+        }
+        
+        if (!self.forVideo) {
             [_finalFilter addTarget:_histogramGenerator];
         }
     }
+}
 
 #pragma mark - Result
 
@@ -349,6 +383,18 @@
     return image;
 }
 
+- (CIImage *)currentResultCIImage {
+    __block CIImage *image = nil;
+    GPUImageOutput *currentInput = _currentInput;
+    [self processAnimated:false capture:false synchronous:true completion:^
+    {
+        if ([currentInput isKindOfClass:[GPUImageTextureInput class]]) {
+            image = [_textureOutput CIImageWithSize:[(GPUImageTextureInput *)currentInput textureSize]];
+        }
+    }];
+    return image;
+}
+
 #pragma mark - Editor Values
 
 - (void)_importAdjustments:(id<TGMediaEditAdjustments>)adjustments
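Taken together, these PGPhotoEditor additions form a synchronous CIImage round trip. A hedged sketch of how a caller might drive it (editor creation and tool configuration are assumed to have happened elsewhere):

    // Run the adjustment chain on a CIImage and read the filtered result back without a preview view.
    [photoEditor setCIImage:inputImage];                          // photoEditor: an already configured PGPhotoEditor (assumed)
    CIImage *filteredImage = [photoEditor currentResultCIImage];  // internally calls processAnimated:capture:synchronous: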
@@ -14,9 +14,9 @@ GLfloat kColorConversion601FullRangeDefault[] = {
 };
 
 GLfloat kColorConversion709Default[] = {
-    1.164, 1.164, 1.164,
-    0.0, -0.213, 2.112,
-    1.793, -0.533, 0.0,
+    1, 1, 1,
+    0, -.21482, 2.12798,
+    1.28033, -.38059, 0,
 };
 
 GLfloat *kColorConversion601 = kColorConversion601Default;
@@ -124,7 +124,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
 #pragma mark Initialization and teardown
 
 
-- (instancetype)initWithAsset:(AVAsset *)asset;
+- (instancetype)initWithAsset:(AVAsset *)asset
 {
     if (!(self = [super init]))
     {
@@ -138,7 +138,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
     return self;
 }
 
-- (void)yuvConversionSetup;
+- (void)yuvConversionSetup
 {
     if ([GPUImageContext supportsFastTextureUpload])
     {
@@ -147,7 +147,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
 
         _preferredConversion = kColorConversion709;
         isFullYUVRange = YES;
-        yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kYUVFullRangeConversionForLAFragmentShaderString];
+        yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kYUVVideoRangeConversionForRGFragmentShaderString];
 
         if (!yuvConversionProgram.initialized)
         {
@@ -224,7 +224,6 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
         isFullYUVRange = NO;
     }
 
-    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
     AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
     readerVideoTrackOutput.alwaysCopiesSampleData = NO;
     [assetReader addOutput:readerVideoTrackOutput];
@@ -486,8 +485,6 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
 
     }
 
-    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
     [GPUImageContext useImageProcessingContext];
 
     if ([GPUImageContext supportsFastTextureUpload])
@@ -510,7 +507,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
         glActiveTexture(GL_TEXTURE4);
         if ([GPUImageContext deviceSupportsRedTextures])
         {
-            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
+            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
         }
         else
         {
@@ -531,7 +528,7 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
         glActiveTexture(GL_TEXTURE5);
         if ([GPUImageContext deviceSupportsRedTextures])
         {
-            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
+            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
         }
         else
         {
@@ -556,6 +553,8 @@ NSString *const kYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
         NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
         [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
         [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
+        
+//        [currentTarget setInputRotation:kGPUImageRotateLeft atIndex:targetTextureIndex];
     }
 
     [outputFramebuffer unlock];
@@ -801,7 +801,7 @@ const NSUInteger TGAttachmentDisplayedAssetLimit = 500;
     if ([cell isKindOfClass:[TGAttachmentAssetCell class]])
         thumbnailImage = cell.imageView.image;
     
-    TGMediaPickerModernGalleryMixin *mixin = [[TGMediaPickerModernGalleryMixin alloc] initWithContext:_context item:asset fetchResult:_fetchResult parentController:self.parentController thumbnailImage:thumbnailImage selectionContext:_selectionContext editingContext:_editingContext suggestionContext:self.suggestionContext hasCaptions:(_allowCaptions && !_forProfilePhoto) allowCaptionEntities:self.allowCaptionEntities hasTimer:self.hasTimer onlyCrop:self.onlyCrop inhibitDocumentCaptions:_inhibitDocumentCaptions inhibitMute:self.inhibitMute asFile:self.asFile itemsLimit:TGAttachmentDisplayedAssetLimit recipientName:self.recipientName hasSilentPosting:self.hasSilentPosting hasSchedule:self.hasSchedule reminder:self.reminder];
+    TGMediaPickerModernGalleryMixin *mixin = [[TGMediaPickerModernGalleryMixin alloc] initWithContext:_context item:asset fetchResult:_fetchResult parentController:self.parentController thumbnailImage:thumbnailImage selectionContext:_selectionContext editingContext:_editingContext suggestionContext:self.suggestionContext hasCaptions:(_allowCaptions && !_forProfilePhoto) allowCaptionEntities:self.allowCaptionEntities hasTimer:self.hasTimer onlyCrop:self.onlyCrop inhibitDocumentCaptions:_inhibitDocumentCaptions inhibitMute:self.inhibitMute asFile:self.asFile itemsLimit:TGAttachmentDisplayedAssetLimit recipientName:self.recipientName hasSilentPosting:self.hasSilentPosting hasSchedule:self.hasSchedule reminder:self.reminder stickersContext:self.stickersContext];
     mixin.presentScheduleController = self.presentScheduleController;
     __weak TGAttachmentCarouselItemView *weakSelf = self;
     mixin.thumbnailSignalForItem = ^SSignal *(id item)
@@ -135,7 +135,7 @@
     CGRect containerFrame = self.view.bounds;
     CGSize fittedSize = TGScaleToSize(_image.size, containerFrame.size);
     
-    _scrollView = [[TGModernGalleryZoomableScrollView alloc] initWithFrame:self.view.bounds];
+    _scrollView = [[TGModernGalleryZoomableScrollView alloc] initWithFrame:self.view.bounds hasDoubleTap:true];
     _scrollView.clipsToBounds = false;
     _scrollView.delegate = self;
     _scrollView.showsHorizontalScrollIndicator = false;
@@ -42,6 +42,7 @@
     TGClipboardPreviewItemView *previewItem = [[TGClipboardPreviewItemView alloc] initWithContext:context images:images];
     __weak TGClipboardPreviewItemView *weakPreviewItem = previewItem;
     
+    previewItem.suggestionContext = suggestionContext;
     previewItem.parentController = parentController;
     previewItem.allowCaptions = hasCaption;
     previewItem.hasTimer = hasTimer;
@@ -117,7 +117,7 @@ UIFont *TGFixedSystemFontOfSize(CGFloat size)
 + (UIFont *)roundedFontOfSize:(CGFloat)size
 {
     if (@available(iOSApplicationExtension 13.0, iOS 13.0, *)) {
-        UIFontDescriptor *descriptor = [UIFont systemFontOfSize: size].fontDescriptor;
+        UIFontDescriptor *descriptor = [UIFont boldSystemFontOfSize: size].fontDescriptor;
         descriptor = [descriptor fontDescriptorWithDesign:UIFontDescriptorSystemDesignRounded];
         return [UIFont fontWithDescriptor:descriptor size:size];
     } else {
@@ -110,6 +110,7 @@
         pickerController.pallete = strongController.pallete;
     }
     pickerController.suggestionContext = strongController.suggestionContext;
+    pickerController.stickersContext = strongController.stickersContext;
     pickerController.localMediaCacheEnabled = strongController.localMediaCacheEnabled;
     pickerController.captionsEnabled = strongController.captionsEnabled;
     pickerController.allowCaptionEntities = strongController.allowCaptionEntities;
@@ -149,6 +150,12 @@
     self.pickerController.suggestionContext = suggestionContext;
 }
 
+- (void)setStickersContext:(id<TGPhotoPaintStickersContext>)stickersContext
+{
+    _stickersContext = stickersContext;
+    self.pickerController.stickersContext = stickersContext;
+}
+
 - (void)setCaptionsEnabled:(bool)captionsEnabled
 {
     _captionsEnabled = captionsEnabled;
@@ -875,7 +882,11 @@
     if ([adjustments cropAppliedForAvatar:false] || adjustments.hasPainting)
     {
         CGRect scaledCropRect = CGRectMake(adjustments.cropRect.origin.x * image.size.width / adjustments.originalSize.width, adjustments.cropRect.origin.y * image.size.height / adjustments.originalSize.height, adjustments.cropRect.size.width * image.size.width / adjustments.originalSize.width, adjustments.cropRect.size.height * image.size.height / adjustments.originalSize.height);
-        return TGPhotoEditorCrop(image, adjustments.paintingData.image, adjustments.cropOrientation, 0, scaledCropRect, adjustments.cropMirrored, targetSize, sourceSize, resize);
+        UIImage *paintingImage = adjustments.paintingData.stillImage;
+        if (paintingImage == nil) {
+            paintingImage = adjustments.paintingData.image;
+        }
+        return TGPhotoEditorCrop(image, paintingImage, adjustments.cropOrientation, 0, scaledCropRect, adjustments.cropMirrored, targetSize, sourceSize, resize);
     }
 
     return image;
@@ -323,7 +323,7 @@
 
 - (TGMediaPickerModernGalleryMixin *)_galleryMixinForContext:(id<LegacyComponentsContext>)context item:(id)item thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities inhibitDocumentCaptions:(bool)inhibitDocumentCaptions asFile:(bool)asFile
 {
-    return [[TGMediaPickerModernGalleryMixin alloc] initWithContext:context item:item fetchResult:_fetchResult parentController:self thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:self.hasTimer onlyCrop:self.onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:self.inhibitMute asFile:asFile itemsLimit:0 recipientName:self.recipientName hasSilentPosting:self.hasSilentPosting hasSchedule:self.hasSchedule reminder:self.reminder];
+    return [[TGMediaPickerModernGalleryMixin alloc] initWithContext:context item:item fetchResult:_fetchResult parentController:self thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:self.hasTimer onlyCrop:self.onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:self.inhibitMute asFile:asFile itemsLimit:0 recipientName:self.recipientName hasSilentPosting:self.hasSilentPosting hasSchedule:self.hasSchedule reminder:self.reminder stickersContext:self.stickersContext];
 }
 
 - (TGMediaPickerModernGalleryMixin *)galleryMixinForIndexPath:(NSIndexPath *)indexPath previewMode:(bool)previewMode outAsset:(TGMediaAsset **)outAsset
@@ -78,6 +78,7 @@
     TGMemoryImageCache *_thumbnailImageCache;
     
     TGMemoryImageCache *_paintingImageCache;
+    TGMemoryImageCache *_stillPaintingImageCache;
     
     TGMemoryImageCache *_originalImageCache;
     TGMemoryImageCache *_originalThumbnailImageCache;
@@ -86,6 +87,7 @@
     NSURL *_fullSizeResultsUrl;
     NSURL *_paintingDatasUrl;
     NSURL *_paintingImagesUrl;
+    NSURL *_stillPaintingImagesUrl;
     NSURL *_videoPaintingImagesUrl;
     
     NSMutableArray *_storeVideoPaintingImages;
@@ -131,6 +133,9 @@
         _paintingImageCache = [[TGMemoryImageCache alloc] initWithSoftMemoryLimit:[[self class] imageSoftMemoryLimit]
                                                                    hardMemoryLimit:[[self class] imageHardMemoryLimit]];
         
+        _stillPaintingImageCache = [[TGMemoryImageCache alloc] initWithSoftMemoryLimit:[[self class] imageSoftMemoryLimit]
+                                                                        hardMemoryLimit:[[self class] imageHardMemoryLimit]];
+        
         _originalImageCache = [[TGMemoryImageCache alloc] initWithSoftMemoryLimit:[[self class] originalImageSoftMemoryLimit]
                                                                    hardMemoryLimit:[[self class] originalImageHardMemoryLimit]];
         _originalThumbnailImageCache = [[TGMemoryImageCache alloc] initWithSoftMemoryLimit:[[self class] thumbnailImageSoftMemoryLimit]
@@ -145,6 +150,9 @@
         _paintingImagesUrl = [NSURL fileURLWithPath:[[[LegacyComponentsGlobals provider] dataStoragePath] stringByAppendingPathComponent:[NSString stringWithFormat:@"paintingimages/%@", _contextId]]];
         [[NSFileManager defaultManager] createDirectoryAtPath:_paintingImagesUrl.path withIntermediateDirectories:true attributes:nil error:nil];
         
+        _stillPaintingImagesUrl = [NSURL fileURLWithPath:[[[LegacyComponentsGlobals provider] dataStoragePath] stringByAppendingPathComponent:@"stillpaintingimages"]];
+        [[NSFileManager defaultManager] createDirectoryAtPath:_stillPaintingImagesUrl.path withIntermediateDirectories:true attributes:nil error:nil];
+        
         _videoPaintingImagesUrl = [NSURL fileURLWithPath:[[[LegacyComponentsGlobals provider] dataStoragePath] stringByAppendingPathComponent:@"videopaintingimages"]];
         [[NSFileManager defaultManager] createDirectoryAtPath:_videoPaintingImagesUrl.path withIntermediateDirectories:true attributes:nil error:nil];
         
@@ -340,6 +348,27 @@
     return result;
 }
 
+- (UIImage *)stillPaintingImageForItem:(NSObject<TGMediaEditableItem> *)item
+{
+    NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
+    if (itemId == nil)
+        return nil;
+    
+    UIImage *result = [_stillPaintingImageCache imageForKey:itemId attributes:NULL];
+    if (result == nil)
+    {
+        NSURL *imageUrl = [_stillPaintingImagesUrl URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.png", [TGStringUtils md5:itemId]]];
+        UIImage *diskImage = [UIImage imageWithContentsOfFile:imageUrl.path];
+        if (diskImage != nil)
+        {
+            result = diskImage;
+            [_stillPaintingImageCache setImage:result forKey:itemId attributes:NULL];
+        }
+    }
+    
+    return result;
+}
+
 #pragma mark - Caption
 
 - (NSString *)captionForItem:(id<TGMediaEditableItem>)item
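For context, a small sketch of the read path this cache adds, assuming an editing context and an editable item are already at hand:

    // Prefer the still variant of the painting overlay when composing a static result image.
    UIImage *stillOverlay = [editingContext stillPaintingImageForItem:editableItem];
    if (stillOverlay != nil) {
        // compose stillOverlay over the photo instead of the animated painting image
    }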
@@ -602,7 +631,7 @@
     [_queue dispatch:block];
 }
 
-- (bool)setPaintingData:(NSData *)data image:(UIImage *)image forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video
+- (bool)setPaintingData:(NSData *)data image:(UIImage *)image stillImage:(UIImage *)stillImage forItem:(NSObject<TGMediaEditableItem> *)item dataUrl:(NSURL **)dataOutUrl imageUrl:(NSURL **)imageOutUrl forVideo:(bool)video
 {
     NSString *itemId = [self _contextualIdForItemId:item.uniqueIdentifier];
 
@@ -628,6 +657,17 @@
         if (video)
             [_storeVideoPaintingImages addObject:imageUrl];
         
+        if (stillImage != nil) {
+            NSURL *stillImageUrl = [_stillPaintingImagesUrl URLByAppendingPathComponent:[NSString stringWithFormat:@"%@.png", [TGStringUtils md5:itemId]]];
+            [_stillPaintingImageCache setImage:stillImage forKey:itemId attributes:NULL];
+            
+            NSData *stillImageData = UIImagePNGRepresentation(stillImage);
+            [stillImageData writeToURL:stillImageUrl options:NSDataWritingAtomic error:nil];
+            
+            if (video)
+                [_storeVideoPaintingImages addObject:stillImageUrl];
+        }
+        
         return (image == nil || imageSuccess) && (data == nil || dataSuccess);
     }
 
@@ -164,7 +164,7 @@
 
     if (recipientName.length > 0)
     {
-        _arrowView = [[UIImageView alloc] initWithImage:TGComponentsImageNamed(@"PhotoPickerArrow")];
+        _arrowView = [[UIImageView alloc] initWithImage: TGTintedImage([UIImage imageNamed:@"Editor/Recipient"], UIColor.whiteColor)];
         _arrowView.alpha = 0.45f;
         [_wrapperView addSubview:_arrowView];
 
@@ -387,6 +387,7 @@
     TGPhotoEditorControllerIntent intent = isVideo ? TGPhotoEditorControllerVideoIntent : TGPhotoEditorControllerGenericIntent;
     TGPhotoEditorController *controller = [[TGPhotoEditorController alloc] initWithContext:_context item:item.editableMediaItem intent:intent adjustments:editorValues caption:caption screenImage:screenImage availableTabs:_interfaceView.currentTabs selectedTab:tab];
     controller.editingContext = _editingContext;
+    controller.stickersContext = _stickersContext;
     self.editorController = controller;
     controller.suggestionContext = self.suggestionContext;
     controller.willFinishEditing = ^(id<TGMediaEditAdjustments> adjustments, id temporaryRep, bool hasChanges)
@@ -166,7 +166,8 @@
 
     _actionButton.highlightImage = highlightImage;
 
-    _progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0, 0, 50, 50)];
+    _progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0, 0, 60, 60)];
+    [_progressView setRadius:60.0];
     _progressView.userInteractionEnabled = false;
     [_progressView setPlay];
     [_actionButton addSubview:_progressView];
@@ -864,7 +865,8 @@
 
     if (progressVisible && _progressView == nil)
     {
-        _progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 50.0f, 50.0f)];
+        _progressView = [[TGMessageImageViewOverlayView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 60.0f, 60.0f)];
+        [_progressView setRadius:60.0];
         _progressView.userInteractionEnabled = false;
 
         _progressView.frame = (CGRect){{CGFloor((self.frame.size.width - _progressView.frame.size.width) / 2.0f), CGFloor((self.frame.size.height - _progressView.frame.size.height) / 2.0f)}, _progressView.frame.size};
@@ -41,17 +41,17 @@
 
 @implementation TGMediaPickerModernGalleryMixin
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext
 {
-    return [self initWithContext:context item:item fetchResult:fetchResult momentList:nil parentController:parentController thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:hasTimer onlyCrop:onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:inhibitMute asFile:asFile itemsLimit:itemsLimit recipientName:recipientName hasSilentPosting:hasSilentPosting hasSchedule:hasSchedule reminder:reminder];
+    return [self initWithContext:context item:item fetchResult:fetchResult momentList:nil parentController:parentController thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:hasTimer onlyCrop:onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:inhibitMute asFile:asFile itemsLimit:itemsLimit recipientName:recipientName hasSilentPosting:hasSilentPosting hasSchedule:hasSchedule reminder:reminder stickersContext:stickersContext];
 }
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext
 {
-    return [self initWithContext:context item:item fetchResult:nil momentList:momentList parentController:parentController thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:hasTimer onlyCrop:onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:inhibitMute asFile:asFile itemsLimit:itemsLimit recipientName:nil hasSilentPosting:hasSilentPosting hasSchedule:hasSchedule reminder:reminder];
+    return [self initWithContext:context item:item fetchResult:nil momentList:momentList parentController:parentController thumbnailImage:thumbnailImage selectionContext:selectionContext editingContext:editingContext suggestionContext:suggestionContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:hasTimer onlyCrop:onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions inhibitMute:inhibitMute asFile:asFile itemsLimit:itemsLimit recipientName:nil hasSilentPosting:hasSilentPosting hasSchedule:hasSchedule reminder:reminder stickersContext:stickersContext];
 }
 
-- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder
+- (instancetype)initWithContext:(id<LegacyComponentsContext>)context item:(id)item fetchResult:(TGMediaAssetFetchResult *)fetchResult momentList:(TGMediaAssetMomentList *)momentList parentController:(TGViewController *)parentController thumbnailImage:(UIImage *)thumbnailImage selectionContext:(TGMediaSelectionContext *)selectionContext editingContext:(TGMediaEditingContext *)editingContext suggestionContext:(TGSuggestionContext *)suggestionContext hasCaptions:(bool)hasCaptions allowCaptionEntities:(bool)allowCaptionEntities hasTimer:(bool)hasTimer onlyCrop:(bool)onlyCrop inhibitDocumentCaptions:(bool)inhibitDocumentCaptions inhibitMute:(bool)inhibitMute asFile:(bool)asFile itemsLimit:(NSUInteger)itemsLimit recipientName:(NSString *)recipientName hasSilentPosting:(bool)hasSilentPosting hasSchedule:(bool)hasSchedule reminder:(bool)reminder stickersContext:(id<TGPhotoPaintStickersContext>)stickersContext
 {
     self = [super init];
     if (self != nil)
@@ -85,6 +85,7 @@
 
         TGMediaPickerGalleryModel *model = [[TGMediaPickerGalleryModel alloc] initWithContext:[_windowManager context] items:galleryItems focusItem:focusItem selectionContext:selectionContext editingContext:editingContext hasCaptions:hasCaptions allowCaptionEntities:allowCaptionEntities hasTimer:hasTimer onlyCrop:onlyCrop inhibitDocumentCaptions:inhibitDocumentCaptions hasSelectionPanel:true hasCamera:false recipientName:recipientName];
         _galleryModel = model;
+        model.stickersContext = stickersContext;
         model.inhibitMute = inhibitMute;
         model.controller = modernGallery;
         model.suggestionContext = suggestionContext;
@@ -3,13 +3,18 @@
 #import <CommonCrypto/CommonDigest.h>
 #import <sys/stat.h>
 
+#import "GPUImageContext.h"
+
 #import "LegacyComponentsInternal.h"
 
 #import "TGImageUtils.h"
 #import "TGPhotoEditorUtils.h"
+#import "PGPhotoEditor.h"
+#import "TGPhotoPaintEntity.h"
 
 #import "TGVideoEditAdjustments.h"
 #import "TGPaintingData.h"
+#import "TGPhotoPaintStickersContext.h"
 
 @interface TGMediaVideoConversionPresetSettings ()
 
@@ -68,6 +73,8 @@
 @property (nonatomic, readonly) TGMediaSampleBufferProcessor *videoProcessor;
 @property (nonatomic, readonly) TGMediaSampleBufferProcessor *audioProcessor;
 
+@property (nonatomic, readonly) id<TGPhotoPaintEntityRenderer> entityRenderer;
+
 @property (nonatomic, readonly) CMTimeRange timeRange;
 @property (nonatomic, readonly) CGSize dimensions;
 @property (nonatomic, readonly) UIImage *coverImage;
@@ -79,7 +86,7 @@
 
 - (instancetype)addImageGenerator:(AVAssetImageGenerator *)imageGenerator;
 - (instancetype)addCoverImage:(UIImage *)coverImage;
-- (instancetype)contextWithAssetReader:(AVAssetReader *)assetReader assetWriter:(AVAssetWriter *)assetWriter videoProcessor:(TGMediaSampleBufferProcessor *)videoProcessor audioProcessor:(TGMediaSampleBufferProcessor *)audioProcessor timeRange:(CMTimeRange)timeRange dimensions:(CGSize)dimensions;
+- (instancetype)contextWithAssetReader:(AVAssetReader *)assetReader assetWriter:(AVAssetWriter *)assetWriter videoProcessor:(TGMediaSampleBufferProcessor *)videoProcessor audioProcessor:(TGMediaSampleBufferProcessor *)audioProcessor timeRange:(CMTimeRange)timeRange dimensions:(CGSize)dimensions entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer;
 
 @end
 
@@ -93,12 +100,12 @@
 
 @implementation TGMediaVideoConverter
 
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
-    return [self convertAVAsset:avAsset adjustments:adjustments watcher:watcher inhibitAudio:false];
+    return [self convertAVAsset:avAsset adjustments:adjustments watcher:watcher inhibitAudio:false entityRenderer:entityRenderer];
 }
 
-+ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio
++ (SSignal *)convertAVAsset:(AVAsset *)avAsset adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher inhibitAudio:(bool)inhibitAudio entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
     SQueue *queue = [[SQueue alloc] init];
 
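A hedged example of calling the updated conversion entry point; the adjustments and entity renderer values are assumed to come from the photo editor and its paint stickers context:

    // Convert a video while allowing the entity renderer to draw stickers and text onto each frame.
    SSignal *conversion = [TGMediaVideoConverter convertAVAsset:asset adjustments:adjustments watcher:nil entityRenderer:entityRenderer];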
@@ -146,7 +153,7 @@
             }
         }
 
-        if (![self setupAssetReaderWriterForAVAsset:avAsset outputURL:outputUrl preset:preset adjustments:adjustments inhibitAudio:inhibitAudio conversionContext:context error:&error])
+        if (![self setupAssetReaderWriterForItem:avAsset outputURL:outputUrl preset:preset entityRenderer:entityRenderer adjustments:adjustments inhibitAudio:inhibitAudio conversionContext:context error:&error])
         {
             [subscriber putError:error];
             return;
@@ -204,6 +211,92 @@
     }];
 }
 
++ (SSignal *)renderUIImage:(UIImage *)image adjustments:(TGMediaVideoEditAdjustments *)adjustments watcher:(TGMediaVideoFileWatcher *)watcher entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
+{
+    SQueue *queue = [[SQueue alloc] init];
+    
+    return [[SSignal alloc] initWithGenerator:^id<SDisposable>(SSubscriber *subscriber)
+    {
+        SAtomic *context = [[SAtomic alloc] initWithValue:[TGMediaVideoConversionContext contextWithQueue:queue subscriber:subscriber]];
+        NSURL *outputUrl = [self _randomTemporaryURL];
+        
+        [queue dispatch:^
+        {
+            if (((TGMediaVideoConversionContext *)context.value).cancelled)
+                return;
+            
+            TGMediaVideoConversionPreset preset = TGMediaVideoConversionPresetAnimation;
+            
+            NSError *error = nil;
+            
+            NSString *outputPath = outputUrl.path;
+            NSFileManager *fileManager = [NSFileManager defaultManager];
+            if ([fileManager fileExistsAtPath:outputPath])
+            {
+                [fileManager removeItemAtPath:outputPath error:&error];
+                if (error != nil)
+                {
+                    [subscriber putError:error];
+                    return;
+                }
+            }
+            
+            if (![self setupAssetReaderWriterForItem:image outputURL:outputUrl preset:preset entityRenderer:entityRenderer adjustments:adjustments inhibitAudio:true conversionContext:context error:&error])
+            {
+                [subscriber putError:error];
+                return;
+            }
+            
+            TGDispatchAfter(1.0, queue._dispatch_queue, ^
+            {
+                if (watcher != nil)
+                    [watcher setupWithFileURL:outputUrl];
+            });
+            
+            [self processWithConversionContext:context completionBlock:^
+            {
+                TGMediaVideoConversionContext *resultContext = context.value;
+                [resultContext.imageGenerator generateCGImagesAsynchronouslyForTimes:@[ [NSValue valueWithCMTime:kCMTimeZero] ] completionHandler:^(__unused CMTime requestedTime, CGImageRef _Nullable image, __unused CMTime actualTime, AVAssetImageGeneratorResult result, __unused NSError * _Nullable error)
+                {
+                    UIImage *coverImage = nil;
+                    if (result == AVAssetImageGeneratorSucceeded)
+                        coverImage = [UIImage imageWithCGImage:image];
+                    
+                    __block TGMediaVideoConversionResult *contextResult = nil;
+                    [context modify:^id(TGMediaVideoConversionContext *resultContext)
+                    {
+                        id liveUploadData = nil;
+                        if (watcher != nil)
+                            liveUploadData = [watcher fileUpdated:true];
+                        
+                        contextResult = [TGMediaVideoConversionResult resultWithFileURL:outputUrl fileSize:0 duration:CMTimeGetSeconds(resultContext.timeRange.duration) dimensions:resultContext.dimensions coverImage:coverImage liveUploadData:liveUploadData];
+                        return [resultContext finishedContext];
+                    }];
+                    
+                    [subscriber putNext:contextResult];
+                    [subscriber putCompletion];
+                }];
+            }];
+        }];
+        
+        return [[SBlockDisposable alloc] initWithBlock:^
+        {
+            [queue dispatch:^
+            {
+                [context modify:^id(TGMediaVideoConversionContext *currentContext)
+                {
+                    if (currentContext.finished)
+                        return currentContext;
+                    
+                    [currentContext.videoProcessor cancel];
+                    
+                    return [currentContext cancelledContext];
+                }];
+            }];
+        }];
+    }];
+}
+
 + (CGSize)dimensionsFor:(CGSize)dimensions adjustments:(TGMediaVideoEditAdjustments *)adjustments preset:(TGMediaVideoConversionPreset)preset {
     CGRect transformedRect = CGRectMake(0.0f, 0.0f, dimensions.width, dimensions.height);
 
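For orientation, the cover-frame step at the end of renderUIImage: above can be reproduced in isolation. A minimal Swift sketch, assuming only a local file URL; the function and parameter names are illustrative, not from the diff:

import AVFoundation
import UIKit

// Illustrative sketch: grab a cover frame at t = 0 from a freshly written video file.
// `fileURL` stands in for the converter's outputUrl; nothing here is taken verbatim from the diff.
func extractCoverImage(from fileURL: URL, completion: @escaping (UIImage?) -> Void) {
    let asset = AVURLAsset(url: fileURL)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true

    generator.generateCGImagesAsynchronously(forTimes: [NSValue(time: .zero)]) { _, cgImage, _, result, _ in
        // Mirror the diff's behaviour: only use the frame when generation succeeded.
        if result == .succeeded, let cgImage = cgImage {
            completion(UIImage(cgImage: cgImage))
        } else {
            completion(nil)
        }
    }
}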
@@ -221,7 +314,7 @@
     return outputDimensions;
 }
 
-+ (AVAssetReaderVideoCompositionOutput *)setupVideoCompositionOutputWithAVAsset:(AVAsset *)avAsset composition:(AVMutableComposition *)composition videoTrack:(AVAssetTrack *)videoTrack preset:(TGMediaVideoConversionPreset)preset adjustments:(TGMediaVideoEditAdjustments *)adjustments timeRange:(CMTimeRange)timeRange outputSettings:(NSDictionary **)outputSettings dimensions:(CGSize *)dimensions conversionContext:(SAtomic *)conversionContext
++ (AVAssetReaderVideoCompositionOutput *)setupVideoCompositionOutputWithAVAsset:(AVAsset *)avAsset composition:(AVMutableComposition *)composition videoTrack:(AVAssetTrack *)videoTrack preset:(TGMediaVideoConversionPreset)preset entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer adjustments:(TGMediaVideoEditAdjustments *)adjustments timeRange:(CMTimeRange)timeRange outputSettings:(NSDictionary **)outputSettings dimensions:(CGSize *)dimensions conversionContext:(SAtomic *)conversionContext
 {
     CGSize transformedSize = CGRectApplyAffineTransform((CGRect){CGPointZero, videoTrack.naturalSize}, videoTrack.preferredTransform).size;;
     CGRect transformedRect = CGRectMake(0, 0, transformedSize.width, transformedSize.height);
@@ -241,7 +334,80 @@
     if (TGOrientationIsSideward(adjustments.cropOrientation, NULL))
         outputDimensions = CGSizeMake(outputDimensions.height, outputDimensions.width);
 
-    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
+    AVMutableCompositionTrack *trimVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+    [trimVideoTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
+    
+    UIImage *overlayImage = nil;
+    if (adjustments.paintingData.imagePath != nil)
+        overlayImage = [UIImage imageWithContentsOfFile:adjustments.paintingData.imagePath];
+    
+    bool hasAnimation = false;
+    for (TGPhotoPaintEntity *entity in adjustments.paintingData.entities) {
+        if (entity.animated) {
+            hasAnimation = true;
+            break;
+        }
+    }
+    if (!hasAnimation) {
+        entityRenderer = nil;
+    }
+    
+    AVMutableVideoComposition *videoComposition;
+    if (entityRenderer != nil || adjustments.toolsApplied) {
+        PGPhotoEditor *editor = nil;
+        CIContext *ciContext = nil;
+        if (adjustments.toolsApplied) {
+            editor = [[PGPhotoEditor alloc] initWithOriginalSize:adjustments.originalSize adjustments:adjustments forVideo:true enableStickers:true];
+            ciContext = [CIContext contextWithEAGLContext:[[GPUImageContext sharedImageProcessingContext] context]];
+        }
+        
+        __block CIImage *overlayCIImage = nil;
+        videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:avAsset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
+            __block CIImage *resultImage = request.sourceImage;
+            
+            if (editor != nil) {
+                [editor setCIImage:resultImage];
+                resultImage = editor.currentResultCIImage;
+            }
+            
+            if (overlayImage != nil && overlayImage.size.width > 0.0) {
+                if (overlayCIImage == nil) {
+                    overlayCIImage = [[CIImage alloc] initWithImage:overlayImage];
+                    CGFloat scale = request.sourceImage.extent.size.width / overlayCIImage.extent.size.width;
+                    overlayCIImage = [overlayCIImage imageByApplyingTransform:CGAffineTransformMakeScale(scale, scale)];
+                }
+                resultImage = [overlayCIImage imageByCompositingOverImage:resultImage];
+            }
+            
+            if (entityRenderer != nil) {
+                [entityRenderer entitiesForTime:request.compositionTime size:request.sourceImage.extent.size completion:^(NSArray<CIImage *> *images) {
+                    for (CIImage *image in images) {
+                        resultImage = [image imageByCompositingOverImage:resultImage];
+                    }
+                    [request finishWithImage:resultImage context:ciContext];
+                }];
+            } else {
+                [request finishWithImage:resultImage context:ciContext];
+            }
+        }];
+    } else {
+        videoComposition = [AVMutableVideoComposition videoComposition];
+        
+        bool mirrored = false;
+        UIImageOrientation videoOrientation = TGVideoOrientationForAsset(avAsset, &mirrored);
+        CGAffineTransform transform = TGVideoTransformForOrientation(videoOrientation, videoTrack.naturalSize, cropRect, mirrored);
+        CGAffineTransform rotationTransform = TGVideoTransformForCrop(adjustments.cropOrientation, cropRect.size, adjustments.cropMirrored);
+        CGAffineTransform finalTransform = CGAffineTransformConcat(transform, rotationTransform);
+        
+        AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trimVideoTrack];
+        [transformer setTransform:finalTransform atTime:kCMTimeZero];
+        
+        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
+        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, timeRange.duration);
+        instruction.layerInstructions = [NSArray arrayWithObject:transformer];
+        videoComposition.instructions = [NSArray arrayWithObject:instruction];
+    }
+    
     if (videoTrack.nominalFrameRate > 0)
         videoComposition.frameDuration = CMTimeMake(1, (int32_t)videoTrack.nominalFrameRate);
     else if (CMTimeCompare(videoTrack.minFrameDuration, kCMTimeZero) == 1)
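The entity/tools branch above switches frame composition to +[AVMutableVideoComposition videoCompositionWithAsset:applyingCIFiltersWithHandler:], so painted overlays and animated entities are composited per frame as CIImages. A condensed Swift sketch of that pattern, assuming an already-decoded overlay CIImage (the function and parameter names are illustrative):

import AVFoundation
import CoreImage

// Sketch of the per-frame compositing pattern used above: every source frame is handed
// to the block as a CIImage, an overlay is composited over it, and the finished image
// is returned to AVFoundation. `overlay` is an assumed, already-prepared CIImage.
func makeOverlayComposition(for asset: AVAsset, overlay: CIImage) -> AVMutableVideoComposition {
    return AVMutableVideoComposition(asset: asset) { request in
        var result = request.sourceImage

        // Scale the overlay to the frame width, then composite it on top of the frame.
        let scale = request.sourceImage.extent.width / overlay.extent.width
        let scaledOverlay = overlay.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
        result = scaledOverlay.composited(over: result)

        // Hand the frame back; passing nil lets AVFoundation use its own CIContext.
        request.finish(with: result, context: nil)
    }
}

Returning each frame through finish(with:context:) is what lets the asset reader's video-composition output deliver already-composited frames to the writer.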
@@ -259,28 +425,7 @@
     if (videoComposition.renderSize.width < FLT_EPSILON || videoComposition.renderSize.height < FLT_EPSILON)
         return nil;
 
-    AVMutableCompositionTrack *trimVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
-    [trimVideoTrack insertTimeRange:timeRange ofTrack:videoTrack atTime:kCMTimeZero error:NULL];
-    
-    bool mirrored = false;
-    UIImageOrientation videoOrientation = TGVideoOrientationForAsset(avAsset, &mirrored);
-    CGAffineTransform transform = TGVideoTransformForOrientation(videoOrientation, videoTrack.naturalSize, cropRect, mirrored);
-    CGAffineTransform rotationTransform = TGVideoTransformForCrop(adjustments.cropOrientation, cropRect.size, adjustments.cropMirrored);
-    CGAffineTransform finalTransform = CGAffineTransformConcat(transform, rotationTransform);
-    
-    AVMutableVideoCompositionLayerInstruction *transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:trimVideoTrack];
-    [transformer setTransform:finalTransform atTime:kCMTimeZero];
-    
-    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
-    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, timeRange.duration);
-    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
-    videoComposition.instructions = [NSArray arrayWithObject:instruction];
-    
-    UIImage *overlayImage = nil;
-    if (adjustments.paintingData.imagePath != nil)
-        overlayImage = [UIImage imageWithContentsOfFile:adjustments.paintingData.imagePath];
-    
-    if (overlayImage != nil)
+    if (overlayImage != nil && entityRenderer == nil)
     {
         CALayer *parentLayer = [CALayer layer];
         parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
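The surviving else-path keeps the CALayer overlay route for still painting when no entity renderer is needed; the parentLayer built above is normally handed to AVVideoCompositionCoreAnimationTool. A hedged Swift sketch of that layer route — the tool wiring is an assumption, since the diff only shows the parent layer being sized:

import AVFoundation
import UIKit

// Sketch of the CALayer-based overlay route behind the `overlayImage != nil && entityRenderer == nil`
// branch above. The core-animation tool wiring is assumed here, not shown in the diff.
func attachStaticOverlay(_ overlayImage: UIImage, to composition: AVMutableVideoComposition) {
    let renderSize = composition.renderSize

    let videoLayer = CALayer()
    videoLayer.frame = CGRect(origin: .zero, size: renderSize)

    let overlayLayer = CALayer()
    overlayLayer.contents = overlayImage.cgImage
    overlayLayer.frame = CGRect(origin: .zero, size: renderSize)

    let parentLayer = CALayer()
    parentLayer.frame = CGRect(origin: .zero, size: renderSize)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(overlayLayer)

    // Video frames are rendered into videoLayer and parentLayer is flattened on top of them.
    composition.animationTool = AVVideoCompositionCoreAnimationTool(
        postProcessingAsVideoLayer: videoLayer,
        in: parentLayer
    )
}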
@@ -335,11 +480,14 @@
     return output;
 }
 
-+ (bool)setupAssetReaderWriterForAVAsset:(AVAsset *)avAsset outputURL:(NSURL *)outputURL preset:(TGMediaVideoConversionPreset)preset adjustments:(TGMediaVideoEditAdjustments *)adjustments inhibitAudio:(bool)inhibitAudio conversionContext:(SAtomic *)outConversionContext error:(NSError **)error
++ (bool)setupAssetReaderWriterForItem:(id)item outputURL:(NSURL *)outputURL preset:(TGMediaVideoConversionPreset)preset entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer adjustments:(TGMediaVideoEditAdjustments *)adjustments inhibitAudio:(bool)inhibitAudio conversionContext:(SAtomic *)outConversionContext error:(NSError **)error
 {
+    if ([item isKindOfClass:[AVAsset class]]) {
     TGMediaSampleBufferProcessor *videoProcessor = nil;
     TGMediaSampleBufferProcessor *audioProcessor = nil;
 
+    AVAsset *avAsset = (AVAsset *)item;
+    
     AVAssetTrack *audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
     AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
     if (videoTrack == nil)
@@ -363,7 +511,7 @@
 
     NSDictionary *outputSettings = nil;
     AVMutableComposition *composition = [AVMutableComposition composition];
-    AVAssetReaderVideoCompositionOutput *output = [self setupVideoCompositionOutputWithAVAsset:avAsset composition:composition videoTrack:videoTrack preset:preset adjustments:adjustments timeRange:timeRange outputSettings:&outputSettings dimensions:&dimensions conversionContext:outConversionContext];
+    AVAssetReaderVideoCompositionOutput *output = [self setupVideoCompositionOutputWithAVAsset:avAsset composition:composition videoTrack:videoTrack preset:preset entityRenderer:entityRenderer adjustments:adjustments timeRange:timeRange outputSettings:&outputSettings dimensions:&dimensions conversionContext:outConversionContext];
     if (output == nil)
         return false;
 
@@ -378,14 +526,6 @@
     [assetReader addOutput:output];
 
     AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
-
-    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
-    [NSNumber numberWithInt:dimensions.width], kCVPixelBufferWidthKey,
-    [NSNumber numberWithInt:dimensions.height], kCVPixelBufferHeightKey,
-    nil];
-
-    AVAssetWriterInputPixelBufferAdaptor *pixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:input sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
-
     [assetWriter addInput:input];
 
     videoProcessor = [[TGMediaSampleBufferProcessor alloc] initWithAssetReaderOutput:output assetWriterInput:input];
@@ -408,10 +548,45 @@
 
     [outConversionContext modify:^id(TGMediaVideoConversionContext *currentContext)
     {
-        return [currentContext contextWithAssetReader:assetReader assetWriter:assetWriter videoProcessor:videoProcessor audioProcessor:audioProcessor timeRange:timeRange dimensions:dimensions];
+        return [currentContext contextWithAssetReader:assetReader assetWriter:assetWriter videoProcessor:videoProcessor audioProcessor:audioProcessor timeRange:timeRange dimensions:dimensions entityRenderer:entityRenderer];
     }];
 
     return true;
+    } else if ([item isKindOfClass:[UIImage class]]) {
+        TGMediaSampleBufferProcessor *videoProcessor = nil;
+        
+        CGSize dimensions = CGSizeZero;
+        NSDictionary *outputSettings = nil;
+        CMTimeRange timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(0.0, NSEC_PER_SEC), CMTimeMakeWithSeconds(4.0, NSEC_PER_SEC));
+        AVMutableComposition *composition = [AVMutableComposition composition];
+        AVAssetTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+        AVAssetReaderVideoCompositionOutput *output = [self setupVideoCompositionOutputWithAVAsset:composition composition:composition videoTrack:videoTrack preset:preset entityRenderer:entityRenderer adjustments:adjustments timeRange:timeRange outputSettings:&outputSettings dimensions:&dimensions conversionContext:outConversionContext];
+        if (output == nil)
+            return false;
+        
+        AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:error];
+        if (assetReader == nil)
+            return false;
+        
+        AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:error];
+        if (assetWriter == nil)
+            return false;
+        
+        [assetReader addOutput:output];
+        
+        AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
+        [assetWriter addInput:input];
+        
+        videoProcessor = [[TGMediaSampleBufferProcessor alloc] initWithAssetReaderOutput:output assetWriterInput:input];
+        
+        [outConversionContext modify:^id(TGMediaVideoConversionContext *currentContext)
+        {
+            return [currentContext contextWithAssetReader:assetReader assetWriter:assetWriter videoProcessor:videoProcessor audioProcessor:nil timeRange:timeRange dimensions:dimensions entityRenderer:entityRenderer];
+        }];
+        
+        return true;
+    }
+    return false;
 }
 
 + (void)processWithConversionContext:(SAtomic *)context_ completionBlock:(void (^)(void))completionBlock
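Both branches of setupAssetReaderWriterForItem: end up pairing an AVAssetReader output with an AVAssetWriter input behind TGMediaSampleBufferProcessor, which is not shown in this hunk. The reader-to-writer pump such a processor has to run looks roughly like this (Swift sketch, an assumption about the surrounding code):

import AVFoundation

// Generic AVAssetReader -> AVAssetWriter pump, sketched to illustrate what a sample-buffer
// processor around `output`/`input` has to do. This is an assumption about the surrounding
// code; TGMediaSampleBufferProcessor itself is not part of this hunk.
func pump(output: AVAssetReaderOutput,
          input: AVAssetWriterInput,
          queue: DispatchQueue,
          completion: @escaping () -> Void) {
    input.requestMediaDataWhenReady(on: queue) {
        while input.isReadyForMoreMediaData {
            if let sampleBuffer = output.copyNextSampleBuffer() {
                input.append(sampleBuffer)
            } else {
                // No more samples: close the input and report completion.
                input.markAsFinished()
                completion()
                break
            }
        }
    }
}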
@@ -865,6 +1040,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
     context->_dimensions = _dimensions;
     context->_coverImage = _coverImage;
     context->_imageGenerator = _imageGenerator;
+    context->_entityRenderer = _entityRenderer;
     return context;
 }
 
@@ -883,6 +1059,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
     context->_dimensions = _dimensions;
     context->_coverImage = _coverImage;
     context->_imageGenerator = _imageGenerator;
+    context->_entityRenderer = _entityRenderer;
     return context;
 }
 
@@ -900,6 +1077,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
     context->_dimensions = _dimensions;
     context->_coverImage = _coverImage;
     context->_imageGenerator = imageGenerator;
+    context->_entityRenderer = _entityRenderer;
     return context;
 }
 
@@ -917,10 +1095,11 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
     context->_dimensions = _dimensions;
     context->_coverImage = coverImage;
     context->_imageGenerator = _imageGenerator;
+    context->_entityRenderer = _entityRenderer;
     return context;
 }
 
-- (instancetype)contextWithAssetReader:(AVAssetReader *)assetReader assetWriter:(AVAssetWriter *)assetWriter videoProcessor:(TGMediaSampleBufferProcessor *)videoProcessor audioProcessor:(TGMediaSampleBufferProcessor *)audioProcessor timeRange:(CMTimeRange)timeRange dimensions:(CGSize)dimensions
+- (instancetype)contextWithAssetReader:(AVAssetReader *)assetReader assetWriter:(AVAssetWriter *)assetWriter videoProcessor:(TGMediaSampleBufferProcessor *)videoProcessor audioProcessor:(TGMediaSampleBufferProcessor *)audioProcessor timeRange:(CMTimeRange)timeRange dimensions:(CGSize)dimensions entityRenderer:(id<TGPhotoPaintEntityRenderer>)entityRenderer
 {
     TGMediaVideoConversionContext *context = [[TGMediaVideoConversionContext alloc] init];
     context->_queue = _queue;
@@ -934,6 +1113,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
     context->_dimensions = dimensions;
     context->_coverImage = _coverImage;
     context->_imageGenerator = _imageGenerator;
+    context->_entityRenderer = entityRenderer;
     return context;
 }
 
@@ -702,7 +702,7 @@ const NSInteger TGMessageImageViewOverlayParticlesCount = 40;
     CGFloat offset = round(diameter * 0.06f);
     CGFloat verticalOffset = 0.0f;
     CGFloat alpha = 0.8f;
-    UIColor *iconColor = TGColorWithHexAndAlpha(0xff000000, 0.45f);
+    UIColor *iconColor = TGColorWithHexAndAlpha(0xffffffff, 1.0f);
     if (diameter <= 25.0f + FLT_EPSILON) {
         offset = round(50.0f * 0.06f) - 1.0f;
         verticalOffset += 0.5f;
@@ -730,16 +730,11 @@ const NSInteger TGMessageImageViewOverlayParticlesCount = 40;
     }
     else
     {
-        CGContextSetFillColorWithColor(context, TGColorWithHexAndAlpha(0xffffffff, alpha).CGColor);
+        CGContextSetFillColorWithColor(context, TGColorWithHexAndAlpha(0x00000000, 0.3).CGColor);
         CGContextFillEllipseInRect(context, CGRectMake(0.0f, 0.0f, diameter, diameter));
 
-        CGContextBeginPath(context);
-        CGContextMoveToPoint(context, offset + floor((diameter - width) / 2.0f), verticalOffset + floor((diameter - height) / 2.0f));
-        CGContextAddLineToPoint(context, offset + floor((diameter - width) / 2.0f) + width, verticalOffset + floor(diameter / 2.0f));
-        CGContextAddLineToPoint(context, offset + floor((diameter - width) / 2.0f), verticalOffset + floor((diameter + height) / 2.0f));
-        CGContextClosePath(context);
-        CGContextSetFillColorWithColor(context, iconColor.CGColor);
-        CGContextFillPath(context);
+        UIImage *iconImage = TGTintedImage([UIImage imageNamed:@"Editor/Play"], iconColor);
+        [iconImage drawAtPoint:CGPointMake(floor((diameter - iconImage.size.width) / 2.0f), floor((diameter - iconImage.size.height) / 2.0f)) blendMode:kCGBlendModeNormal alpha:1.0f];
     }
 
     break;
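The overlay now draws a tinted "Editor/Play" asset over a 30% black circle instead of filling a triangle path. Roughly the same rendering as a stand-alone Swift sketch, with TGTintedImage approximated by UIImage.withTintColor (iOS 13+); only the asset name comes from the diff:

import UIKit

// Stand-alone sketch of the new play-button rendering: a 30% black circle with a white-tinted
// icon drawn centred on top.
func playOverlayImage(diameter: CGFloat) -> UIImage {
    let size = CGSize(width: diameter, height: diameter)
    return UIGraphicsImageRenderer(size: size).image { _ in
        UIColor.black.withAlphaComponent(0.3).setFill()
        UIBezierPath(ovalIn: CGRect(origin: .zero, size: size)).fill()

        if let icon = UIImage(named: "Editor/Play")?
            .withTintColor(.white, renderingMode: .alwaysOriginal) {
            icon.draw(at: CGPoint(x: floor((diameter - icon.size.width) / 2.0),
                                  y: floor((diameter - icon.size.height) / 2.0)))
        }
    }
}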
@@ -24,7 +24,7 @@
     _containerView = [[TGModernGalleryImageItemContainerView alloc] initWithFrame:_internalContainerView.bounds];
     [_internalContainerView addSubview:_containerView];
 
-    _scrollView = [[TGModernGalleryZoomableScrollView alloc] initWithFrame:_containerView.bounds];
+    _scrollView = [[TGModernGalleryZoomableScrollView alloc] initWithFrame:_containerView.bounds hasDoubleTap:true];
     _scrollView.delegate = self;
     _scrollView.showsHorizontalScrollIndicator = false;
     _scrollView.showsVerticalScrollIndicator = false;
@@ -3,25 +3,48 @@
 #import "TGDoubleTapGestureRecognizer.h"
 
 @interface TGModernGalleryZoomableScrollView () <TGDoubleTapGestureRecognizerDelegate>
+{
+    bool _hasDoubleTap;
+}
 @end
 
 @implementation TGModernGalleryZoomableScrollView
 
-- (instancetype)initWithFrame:(CGRect)frame
+- (instancetype)initWithFrame:(CGRect)frame hasDoubleTap:(bool)hasDoubleTap
 {
     self = [super initWithFrame:frame];
     if (self != nil)
     {
+        _hasDoubleTap = hasDoubleTap;
+        if (hasDoubleTap) {
         TGDoubleTapGestureRecognizer *recognizer = [[TGDoubleTapGestureRecognizer alloc] initWithTarget:self action:@selector(doubleTapGesture:)];
         recognizer.consumeSingleTap = true;
         [self addGestureRecognizer:recognizer];
+        } else {
+            self.panGestureRecognizer.minimumNumberOfTouches = 2;
+        }
 
         _normalZoomScale = 1.0f;
     }
     return self;
 }
 
+- (void)setContentInset:(UIEdgeInsets)contentInset {
+    if (_hasDoubleTap) {
+        [super setContentInset:contentInset];
+    } else {
+        [super setContentInset:UIEdgeInsetsZero];
+    }
+}
+
+- (UIEdgeInsets)adjustedContentInset {
+    if (_hasDoubleTap) {
+        return [super adjustedContentInset];
+    } else {
+        return UIEdgeInsetsZero;
+    }
+}
+
 - (void)doubleTapGesture:(TGDoubleTapGestureRecognizer *)recognizer
 {
     if (recognizer.state == UIGestureRecognizerStateRecognized)
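The hasDoubleTap flag above effectively puts the zoomable scroll view into a drawing-friendly mode: without it, content insets are suppressed and panning needs two fingers, so single-finger strokes reach the canvas. The same idea in a small Swift sketch (class name is illustrative):

import UIKit

// Minimal sketch of the scroll-view behaviour introduced above: when double-tap zooming is
// disabled, one-finger gestures are left for drawing and content insets are suppressed.
final class PassthroughZoomableScrollView: UIScrollView {
    private let hasDoubleTap: Bool

    init(frame: CGRect, hasDoubleTap: Bool) {
        self.hasDoubleTap = hasDoubleTap
        super.init(frame: frame)
        if !hasDoubleTap {
            // Require two fingers to scroll so single-finger strokes fall through to the canvas.
            panGestureRecognizer.minimumNumberOfTouches = 2
        }
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    override var contentInset: UIEdgeInsets {
        get { hasDoubleTap ? super.contentInset : .zero }
        set { super.contentInset = hasDoubleTap ? newValue : .zero }
    }

    override var adjustedContentInset: UIEdgeInsets {
        hasDoubleTap ? super.adjustedContentInset : .zero
    }
}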
@@ -2,7 +2,7 @@
 
 #import <LegacyComponents/LegacyComponents.h>
 
-const CGSize TGPaintBrushTextureSize = { 256.0f, 256.0f };
+const CGSize TGPaintBrushTextureSize = { 384.0f, 384.0f };
 const CGSize TGPaintBrushPreviewTextureSize = { 64.0f, 64.0f };
 
 @interface TGPaintBrush ()
@@ -22,6 +22,8 @@
     CGAffineTransform _canvasTransform;
     CGRect _dirtyRect;
 
+    CGRect _visibleRect;
+    
     TGPaintInput *_input;
     TGPaintPanGestureRecognizer *_gestureRecognizer;
     bool _beganDrawing;
@@ -40,6 +42,8 @@
     if (self != nil)
     {
         self.contentScaleFactor = _screenScale;
+        self.multipleTouchEnabled = true;
+        self.exclusiveTouch = true;
 
         _state = [[TGPaintState alloc] init];
 
@@ -82,10 +86,18 @@
 - (void)setFrame:(CGRect)frame
 {
     [super setFrame:frame];
+    _visibleRect = self.bounds;
+    
     [self _updateTransform];
 }
 
+- (void)setBounds:(CGRect)bounds
+{
+    [super setBounds:bounds];
+    
+    _visibleRect = bounds;
+}
+
 - (void)_updateTransform
 {
     CGAffineTransform transform = CGAffineTransformIdentity;
@@ -112,7 +124,7 @@
     _gestureRecognizer = [[TGPaintPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];
     _gestureRecognizer.delegate = self;
     _gestureRecognizer.minimumNumberOfTouches = 1;
-    _gestureRecognizer.maximumNumberOfTouches = 1;
+    _gestureRecognizer.maximumNumberOfTouches = 2;
 
     __weak TGPaintCanvas *weakSelf = self;
     _gestureRecognizer.shouldRecognizeTap = ^bool
@@ -162,7 +174,7 @@
     }
 }
 
-- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)__unused gestureRecognizer
+- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer
 {
     if (self.shouldDraw != nil)
         return self.shouldDraw();
@@ -172,9 +184,9 @@
 
 - (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
 {
-    if (gestureRecognizer == _gestureRecognizer && ([otherGestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]] || [otherGestureRecognizer isKindOfClass:[UIRotationGestureRecognizer class]]))
-        return false;
-    
+    // if (gestureRecognizer == _gestureRecognizer && ([otherGestureRecognizer isKindOfClass:[UIPinchGestureRecognizer class]] || [otherGestureRecognizer isKindOfClass:[UIRotationGestureRecognizer class]]))
+    //     return false;
+    //
     return true;
 }
 
@@ -264,16 +276,18 @@
 
 - (CGRect)visibleRect
 {
-    return self.bounds;
+    return _visibleRect;
 }
 
 - (void)layoutSubviews
 {
     [super layoutSubviews];
 
+    [self.painting performSynchronouslyInContext:^{
     [_buffers update];
 
     [self draw];
+    }];
 }
 
 #pragma mark - GL Setup
@@ -168,8 +168,10 @@
     TGPaintCanvas *canvas = (TGPaintCanvas *) recognizer.view;
     TGPainting *painting = canvas.painting;
 
+    [painting performAsynchronouslyInContext:^{
     painting.activePath = nil;
     [canvas draw];
+    }];
 }
 
 - (void)paintPath:(TGPaintPath *)path inCanvas:(TGPaintCanvas *)canvas
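performAsynchronouslyInContext: above (and the performSynchronouslyInContext: declared in the header hunk further below) serialize painting work so the GL-backed painting state is only touched from one queue. A hedged Swift sketch of that serialization, with a plain dispatch queue standing in for the painting's context queue:

import Dispatch

// Sketch of the sync/async "perform in context" pair: all painting work funnels through one
// serial queue so GL state is never touched from two threads at once. The queue here is an
// assumption standing in for the painting's real GL context queue.
final class PaintingContextQueue {
    private let queue = DispatchQueue(label: "paint.context.queue")

    func performSynchronously(_ block: () -> Void) {
        queue.sync(execute: block)
    }

    func performAsynchronously(_ block: @escaping () -> Void) {
        queue.async(execute: block)
    }
}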
@@ -16,8 +16,12 @@
 - (void)touchesMoved:(NSSet *)inTouches withEvent:(UIEvent *)event
 {
     _touches = [inTouches copy];
+    if (inTouches.count > 1) {
+        self.state = UIGestureRecognizerStateCancelled;
+    } else {
     [super touchesMoved:inTouches withEvent:event];
 }
+}
 
 - (void)touchesEnded:(NSSet *)inTouches withEvent:(UIEvent *)event
 {
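The touchesMoved: change above makes the one-finger paint recognizer back off as soon as a second finger lands, so two-finger pan/zoom gestures can take over. A Swift sketch of that pattern on a UIPanGestureRecognizer subclass (illustrative, not the class from the diff):

import UIKit
import UIKit.UIGestureRecognizerSubclass

// Sketch of the touch-handling change: a one-finger drawing recognizer cancels itself
// as soon as a second finger lands.
final class SingleTouchPanGestureRecognizer: UIPanGestureRecognizer {
    override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) {
        if touches.count > 1 {
            // Hand the gesture back to the scroll view's two-finger recognizers.
            state = .cancelled
        } else {
            super.touchesMoved(touches, with: event)
        }
    }
}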
@@ -168,7 +168,8 @@ typedef struct
     for (f = state.remainder; f <= distance; f += step, pressure += pressureStep)
     {
         CGFloat alpha = boldenFirst ? boldenedAlpha : state.alpha;
-        CGFloat brushSize = MIN(brushWeight, brushWeight - pressure * brushWeight * 0.55f);
+        CGFloat brushSize = brushWeight;
+        // CGFloat brushSize = MIN(brushWeight, brushWeight - pressure * brushWeight * 0.55f);
         [state addPoint:start size:brushSize angle:vectorAngle alpha:alpha index:i];
 
         start = TGPaintAddPoints(start, TGPaintMultiplyPoint(unitVector, step));
@@ -27,6 +27,7 @@
 
 - (instancetype)initWithSize:(CGSize)size undoManager:(TGPaintUndoManager *)undoManager imageData:(NSData *)imageData;
 
+- (void)performSynchronouslyInContext:(void (^)(void))block;
 - (void)performAsynchronouslyInContext:(void (^)(void))block;
 
 - (void)paintStroke:(TGPaintPath *)path clearBuffer:(bool)clearBuffer completion:(void (^)(void))completion;
Some files were not shown because too many files have changed in this diff.