From c5248e7d4c4555189f6f97c3b694b3b180ea70f3 Mon Sep 17 00:00:00 2001 From: levs42 Date: Wed, 7 Aug 2024 16:17:37 -0700 Subject: [PATCH] Allow rendering effects only on the stream --- .../Extension/CMSampleBuffer+Extension.swift | 24 +++++ Sources/IO/IOVideoUnit.swift | 30 ++++-- Sources/Screen/Screen.swift | 46 ++++++++- Sources/Screen/ScreenObject.swift | 39 +++++--- Sources/Screen/ScreenRenderer.swift | 94 +++++++++++++------ 5 files changed, 180 insertions(+), 53 deletions(-) diff --git a/Sources/Extension/CMSampleBuffer+Extension.swift b/Sources/Extension/CMSampleBuffer+Extension.swift index 0a4c2f923..0efd46f06 100644 --- a/Sources/Extension/CMSampleBuffer+Extension.swift +++ b/Sources/Extension/CMSampleBuffer+Extension.swift @@ -3,6 +3,8 @@ import AVFoundation import CoreMedia extension CMSampleBuffer { + static let ScreenObjectImageTarget: CFString = "ScreenObjectImageTarget" as CFString + @inlinable @inline(__always) var isNotSync: Bool { get { guard !sampleAttachments.isEmpty else { @@ -17,4 +19,26 @@ extension CMSampleBuffer { sampleAttachments[0][.notSync] = newValue ? 1 : nil } } + + var targetType: ScreenObject.ImageTarget? { + get { + guard let rawTargetAttachment = CMGetAttachment( + self, + key: CMSampleBuffer.ScreenObjectImageTarget as CFString, + attachmentModeOut: nil) as? NSNumber + else { return nil } + + return ScreenObject.ImageTarget(rawValue: rawTargetAttachment.intValue) + } + set { + if let value = newValue { + CMSetAttachment(self, + key: CMSampleBuffer.ScreenObjectImageTarget, + value: NSNumber(value: value.rawValue), + attachmentMode: kCMAttachmentMode_ShouldPropagate) + } else { + CMRemoveAttachment(self, key: CMSampleBuffer.ScreenObjectImageTarget) + } + } + } } diff --git a/Sources/IO/IOVideoUnit.swift b/Sources/IO/IOVideoUnit.swift index aac06235c..56aaddbdd 100644 --- a/Sources/IO/IOVideoUnit.swift +++ b/Sources/IO/IOVideoUnit.swift @@ -255,14 +255,28 @@ extension IOVideoUnit: IOVideoMixerDelegate { } func videoMixer(_ videoMixer: IOVideoMixer, didOutput sampleBuffer: CMSampleBuffer) { - if let imageBuffer = sampleBuffer.imageBuffer { - codec.append( - imageBuffer, - presentationTimeStamp: sampleBuffer.presentationTimeStamp, - duration: sampleBuffer.duration - ) + func sendToStream() { + if let imageBuffer = sampleBuffer.imageBuffer { + codec.append( + imageBuffer, + presentationTimeStamp: sampleBuffer.presentationTimeStamp, + duration: sampleBuffer.duration + ) + } + mixer?.videoUnit(self, didOutput: sampleBuffer) + } + func sendToView() { + view?.enqueue(sampleBuffer) + } + let targetType = sampleBuffer.targetType ?? .both + switch targetType { + case .both: + sendToStream() + sendToView() + case .stream: + sendToStream() + case .view: + sendToView() } - view?.enqueue(sampleBuffer) - mixer?.videoUnit(self, didOutput: sampleBuffer) } } diff --git a/Sources/Screen/Screen.swift b/Sources/Screen/Screen.swift index 0788c6acf..350b2833d 100644 --- a/Sources/Screen/Screen.swift +++ b/Sources/Screen/Screen.swift @@ -79,6 +79,9 @@ public final class Screen: ScreenObjectContainerConvertible { } } #endif + + public var renderEffectsSeparately = true + weak var observer: (any ScreenObserver)?
private var root: ScreenObjectContainer = .init() private(set) var renderer = ScreenRendererByCPU() @@ -131,6 +134,23 @@ public final class Screen: ScreenObjectContainerConvertible { root.draw(renderer) return sampleBuffer } + + func render(streamBuffer: CMSampleBuffer, viewBuffer: CMSampleBuffer) { + streamBuffer.imageBuffer?.lockBaseAddress(Self.lockFrags) + viewBuffer.imageBuffer?.lockBaseAddress(Self.lockFrags) + defer { + streamBuffer.imageBuffer?.unlockBaseAddress(Self.lockFrags) + viewBuffer.imageBuffer?.unlockBaseAddress(Self.lockFrags) + } + renderer.setTarget(streamBuffer.imageBuffer, .stream) + renderer.setTarget(viewBuffer.imageBuffer, .view) + if let dimensions = streamBuffer.formatDescription?.dimensions { + root.size = dimensions.size + } + delegate?.screen(self, willLayout: streamBuffer.presentationTimeStamp) + root.layout(renderer) + root.draw(renderer) + } } extension Screen: Running { @@ -191,7 +211,31 @@ extension Screen: ChoreographerDelegate { ) == noErr else { return } - if let sampleBuffer { + if renderEffectsSeparately { + var viewPixelBuffer: CVPixelBuffer? + pixelBufferPool?.createPixelBuffer(&viewPixelBuffer) + guard let viewPixelBuffer else { + return + } + var viewSampleBuffer: CMSampleBuffer? + guard CMSampleBufferCreateReadyWithImageBuffer( + allocator: kCFAllocatorDefault, + imageBuffer: viewPixelBuffer, + formatDescription: outputFormat, + sampleTiming: &timingInfo, + sampleBufferOut: &viewSampleBuffer + ) == noErr else { + return + } + if let sampleBuffer, let viewSampleBuffer { + render(streamBuffer: sampleBuffer, viewBuffer: viewSampleBuffer) + sampleBuffer.targetType = .stream + viewSampleBuffer.targetType = .view + observer?.screen(self, didOutput: sampleBuffer) + observer?.screen(self, didOutput: viewSampleBuffer) + } + } else if let sampleBuffer { + sampleBuffer.targetType = .both observer?.screen(self, didOutput: render(sampleBuffer)) } } diff --git a/Sources/Screen/ScreenObject.swift b/Sources/Screen/ScreenObject.swift index 9635385cb..8e437e638 100644 --- a/Sources/Screen/ScreenObject.swift +++ b/Sources/Screen/ScreenObject.swift @@ -35,6 +35,12 @@ open class ScreenObject { case bottom } + public enum ImageTarget: Int, Equatable { + case stream + case view + case both + } + /// The screen object container that contains this screen object public internal(set) weak var parent: ScreenObjectContainer? @@ -83,8 +89,8 @@ open class ScreenObject { } /// Makes cgImage for offscreen image. - open func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { - return nil + open func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] { + return [] } /// Makes screen object bounds for offscreen image. @@ -157,11 +163,11 @@ public final class ImageScreenObject: ScreenObject { } } - override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + override public func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] { let intersection = bounds.intersection(renderer.bounds) guard bounds != intersection else { - return cgImage + return [(.both, cgImage)] } // Handling when the drawing area is exceeded. 
@@ -185,7 +191,8 @@ public final class ImageScreenObject: ScreenObject { y = abs(bounds.origin.y) } - return cgImage?.cropping(to: .init(origin: .init(x: x, y: y), size: intersection.size)) + let image = cgImage?.cropping(to: .init(origin: .init(x: x, y: y), size: intersection.size)) + return [(.both, image)] } override public func makeBounds(_ size: CGSize) -> CGRect { @@ -252,9 +259,9 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessorble { return false } - override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + override public func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] { guard let sampleBuffer = queue?.dequeue(), let pixelBuffer = sampleBuffer.imageBuffer else { - return nil + return [] } // Resizing before applying the filter for performance optimization. var image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale( @@ -262,12 +269,14 @@ public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessorble { image: pixelBuffer.size )) if effects.isEmpty { - return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + return [(.both, renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)))] } else { + let viewImage = renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) for effect in effects { image = effect.execute(image, info: sampleBuffer) } - return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + let streamImage = renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + return [(.view, viewImage), (.stream, streamImage)] } } @@ -372,15 +381,15 @@ public final class TextScreenObject: ScreenObject { return super.makeBounds(frameSize) } - override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + override public func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] { guard let context, let framesetter else { - return nil + return [] } let path = CGPath(rect: .init(origin: .zero, size: bounds.size), transform: nil) let frame = CTFramesetterCreateFrame(framesetter, .init(), path, nil) context.clear(context.boundingBoxOfPath) CTFrameDraw(frame, context) - return context.makeImage() + return [(.both, context.makeImage())] } } @@ -472,15 +481,15 @@ public final class AssetScreenObject: ScreenObject, ChromaKeyProcessorble { } } - override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? { + override public func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] { guard let sampleBuffer, let pixelBuffer = sampleBuffer.imageBuffer else { - return nil + return [] } let image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale( bounds.size, image: pixelBuffer.size )) - return renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)) + return [(.both, renderer.context.createCGImage(image, from: videoGravity.region(bounds, image: image.extent)))] } override func draw(_ renderer: some ScreenRenderer) { diff --git a/Sources/Screen/ScreenRenderer.swift b/Sources/Screen/ScreenRenderer.swift index 8bd97c725..c7dc21f0b 100644 --- a/Sources/Screen/ScreenRenderer.swift +++ b/Sources/Screen/ScreenRenderer.swift @@ -16,7 +16,7 @@ public protocol ScreenRenderer: AnyObject { /// Draws a sceen object. 
func draw(_ screenObject: ScreenObject) /// Sets up the render target. - func setTarget(_ pixelBuffer: CVPixelBuffer?) + func setTarget(_ pixelBuffer: CVPixelBuffer?, _ targetType: ScreenObject.ImageTarget) } final class ScreenRendererByCPU: ScreenRenderer { @@ -72,7 +72,9 @@ final class ScreenRendererByCPU: ScreenRenderer { decode: nil, renderingIntent: .defaultIntent) private var images: [ScreenObject: vImage_Buffer] = [:] + private var viewImages: [ScreenObject: vImage_Buffer] = [:] private var canvas: vImage_Buffer = .init() + private var viewCanvas: vImage_Buffer = .init() private var converter: vImageConverter? private var shapeFactory = ShapeFactory() private var pixelFormatType: OSType? { @@ -88,7 +90,51 @@ final class ScreenRendererByCPU: ScreenRenderer { return try? ChromaKeyProcessor() }() - func setTarget(_ pixelBuffer: CVPixelBuffer?) { + func setTarget(_ pixelBuffer: CVPixelBuffer?, _ targetType: ScreenObject.ImageTarget = .both) { + guard let pixelBuffer else { + return + } + switch targetType { + case .view: + setTarget(pixelBuffer, &viewCanvas) + default: + setTarget(pixelBuffer, &canvas) + } + } + + func layout(_ screenObject: ScreenObject) { + autoreleasepool { + let imageList = screenObject.makeImage(self) + for (target, image) in imageList { + guard let image else { + continue + } + switch target { + case .view: + layout(screenObject, image, &viewImages) + case .stream: + layout(screenObject, image, &images) + case .both: + layout(screenObject, image, &images) + layout(screenObject, image, &viewImages) + } + } + } + } + + func draw(_ screenObject: ScreenObject) { + let origin = screenObject.bounds.origin + + if var image = images[screenObject] { + draw(&image, canvas, origin) + } + + if var viewImage = viewImages[screenObject] { + draw(&viewImage, viewCanvas, origin) + } + } + + func setTarget(_ pixelBuffer: CVPixelBuffer?, _ canvas: inout vImage_Buffer) { guard let pixelBuffer else { return } @@ -122,38 +168,28 @@ final class ScreenRendererByCPU: ScreenRenderer { } } - func layout(_ screenObject: ScreenObject) { - autoreleasepool { - guard let image = screenObject.makeImage(self) else { - return - } - do { - images[screenObject]?.free() - var buffer = try vImage_Buffer(cgImage: image, format: format) - images[screenObject] = buffer - if 0 < screenObject.cornerRadius { - if var mask = shapeFactory.cornerRadius(image.size, cornerRadius: screenObject.cornerRadius) { - vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) - } - } else { - if let screenObject = screenObject as? (any ChromaKeyProcessorble), - let chromaKeyColor = screenObject.chromaKeyColor, - var mask = try choromaKeyProcessor?.makeMask(&buffer, chromeKeyColor: chromaKeyColor) { - vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) - } + private func layout(_ screenObject: ScreenObject, _ image: CGImage, _ images: inout [ScreenObject: vImage_Buffer]) { + do { + images[screenObject]?.free() + var buffer = try vImage_Buffer(cgImage: image, format: format) + images[screenObject] = buffer + if 0 < screenObject.cornerRadius { + if var mask = shapeFactory.cornerRadius(image.size, cornerRadius: screenObject.cornerRadius) { + vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) + } + } else { + if let screenObject = screenObject as? 
(any ChromaKeyProcessorble), + let chromaKeyColor = screenObject.chromaKeyColor, + var mask = try choromaKeyProcessor?.makeMask(&buffer, chromeKeyColor: chromaKeyColor) { + vImageOverwriteChannels_ARGB8888(&mask, &buffer, &buffer, 0x8, Self.noFlags) } - } catch { - logger.error(error) } + } catch { + logger.error(error) } } - func draw(_ screenObject: ScreenObject) { - guard var image = images[screenObject] else { - return - } - - let origin = screenObject.bounds.origin + private func draw(_ image: inout vImage_Buffer, _ canvas: vImage_Buffer, _ origin: CGPoint) { let start = Int(max(0, origin.y)) * canvas.rowBytes + Int(max(0, origin.x)) * 4 var destination = vImage_Buffer(
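
Note: the sketch below shows how a custom screen object outside this patch might adopt the revised makeImage contract, which now returns one image per ImageTarget instead of a single optional CGImage. WatermarkScreenObject, streamImage, and previewImage are hypothetical names used only for illustration; ScreenObject, ScreenRenderer, and ImageTarget come from the code above, and the HaishinKit module name is assumed.

import CoreGraphics
import HaishinKit

// Hypothetical subclass illustrating the per-target makeImage contract.
final class WatermarkScreenObject: ScreenObject {
    // Image with the watermark baked in, intended for the encoder (.stream).
    var streamImage: CGImage?
    // Clean image without the watermark, intended for the local preview (.view).
    var previewImage: CGImage?

    override func makeImage(_ renderer: some ScreenRenderer) -> [(ImageTarget, CGImage?)] {
        // Returning one entry per target lets ScreenRendererByCPU keep separate
        // vImage buffers for the stream canvas and the view canvas.
        return [(.stream, streamImage), (.view, previewImage)]
    }
}

With Screen.renderEffectsSeparately left at its default of true, Screen emits two sample buffers per frame tagged .stream and .view, and IOVideoUnit routes the .stream buffer to the codec and mixer and the .view buffer to the preview view; setting the flag to false restores the previous single-buffer path tagged .both.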