add layer video compositor example

This commit is contained in:
Sami Samhuri 2016-11-16 19:59:20 -08:00
parent dd5091fe2a
commit f716607e17
No known key found for this signature in database
GPG key ID: F76F41F04D99808F
6 changed files with 289 additions and 9 deletions

View file

@ -12,6 +12,9 @@
7B426AC01DDD559D002E94E7 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7B426ABE1DDD559D002E94E7 /* Main.storyboard */; };
7B426AC21DDD559D002E94E7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7B426AC11DDD559D002E94E7 /* Assets.xcassets */; };
7B426AC51DDD559D002E94E7 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7B426AC31DDD559D002E94E7 /* LaunchScreen.storyboard */; };
7B426ACE1DDD5647002E94E7 /* LayerVideoCompositionInstruction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B426ACC1DDD5647002E94E7 /* LayerVideoCompositionInstruction.swift */; };
7B426ACF1DDD5647002E94E7 /* LayerVideoCompositor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B426ACD1DDD5647002E94E7 /* LayerVideoCompositor.swift */; };
7B426AD11DDD5739002E94E7 /* video.mov in Resources */ = {isa = PBXBuildFile; fileRef = 7B426AD01DDD5739002E94E7 /* video.mov */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
@ -22,6 +25,9 @@
7B426AC11DDD559D002E94E7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
7B426AC41DDD559D002E94E7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
7B426AC61DDD559D002E94E7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
7B426ACC1DDD5647002E94E7 /* LayerVideoCompositionInstruction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LayerVideoCompositionInstruction.swift; sourceTree = "<group>"; };
7B426ACD1DDD5647002E94E7 /* LayerVideoCompositor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LayerVideoCompositor.swift; sourceTree = "<group>"; };
7B426AD01DDD5739002E94E7 /* video.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = video.mov; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -54,6 +60,9 @@
7B426AB91DDD559D002E94E7 /* LayerVideoCompositor */ = {
isa = PBXGroup;
children = (
7B426AD01DDD5739002E94E7 /* video.mov */,
7B426ACC1DDD5647002E94E7 /* LayerVideoCompositionInstruction.swift */,
7B426ACD1DDD5647002E94E7 /* LayerVideoCompositor.swift */,
7B426ABA1DDD559D002E94E7 /* AppDelegate.swift */,
7B426ABC1DDD559D002E94E7 /* ViewController.swift */,
7B426ABE1DDD559D002E94E7 /* Main.storyboard */,
@ -97,6 +106,7 @@
7B426AB61DDD559D002E94E7 = {
CreatedOnToolsVersion = 8.1;
DevelopmentTeam = X45WPY5JFZ;
LastSwiftMigration = 0810;
ProvisioningStyle = Automatic;
};
};
@ -124,6 +134,7 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7B426AD11DDD5739002E94E7 /* video.mov in Resources */,
7B426AC51DDD559D002E94E7 /* LaunchScreen.storyboard in Resources */,
7B426AC21DDD559D002E94E7 /* Assets.xcassets in Resources */,
7B426AC01DDD559D002E94E7 /* Main.storyboard in Resources */,
@ -137,7 +148,9 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7B426ACF1DDD5647002E94E7 /* LayerVideoCompositor.swift in Sources */,
7B426ABD1DDD559D002E94E7 /* ViewController.swift in Sources */,
7B426ACE1DDD5647002E94E7 /* LayerVideoCompositionInstruction.swift in Sources */,
7B426ABB1DDD559D002E94E7 /* AppDelegate.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
@ -261,11 +274,14 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
DEVELOPMENT_TEAM = X45WPY5JFZ;
INFOPLIST_FILE = LayerVideoCompositor/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = ca.gurulogic.LayerVideoCompositor;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 3.0;
};
name = Debug;
@ -274,8 +290,10 @@
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
DEVELOPMENT_TEAM = X45WPY5JFZ;
INFOPLIST_FILE = LayerVideoCompositor/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = ca.gurulogic.LayerVideoCompositor;
PRODUCT_NAME = "$(TARGET_NAME)";
@ -302,6 +320,7 @@
7B426ACB1DDD559D002E94E7 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};

View file

@ -1,14 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11134" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11542" systemVersion="16B2555" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<device id="retina4_7" orientation="portrait">
<adaptation id="fullscreen"/>
</device>
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11106"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11524"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="target" sceneMemberID="viewController">
<viewController id="BYZ-38-t0r" customClass="ViewController" customModule="LayerVideoCompositor" customModuleProvider="target" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
@ -16,8 +19,30 @@
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<subviews>
<activityIndicatorView opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" hidesWhenStopped="YES" animating="YES" style="whiteLarge" translatesAutoresizingMaskIntoConstraints="NO" id="gMZ-I5-gYe">
<rect key="frame" x="169" y="315" width="37" height="37"/>
</activityIndicatorView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Rendering video" textAlignment="center" lineBreakMode="wordWrap" numberOfLines="0" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="xce-mO-TBx">
<rect key="frame" x="10" y="286" width="355" height="21"/>
<fontDescription key="fontDescription" type="system" pointSize="17"/>
<color key="textColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" red="0.20000000000000001" green="0.20000000000000001" blue="0.20000000000000001" alpha="0.85090648849999995" colorSpace="calibratedRGB"/>
<constraints>
<constraint firstItem="gMZ-I5-gYe" firstAttribute="top" secondItem="xce-mO-TBx" secondAttribute="bottom" constant="8" id="BnQ-UY-9C3"/>
<constraint firstItem="xce-mO-TBx" firstAttribute="centerX" secondItem="gMZ-I5-gYe" secondAttribute="centerX" id="HTV-8x-I5q"/>
<constraint firstItem="xce-mO-TBx" firstAttribute="width" secondItem="8bC-Xf-vdC" secondAttribute="width" constant="-20" id="OzH-Sr-3uc"/>
<constraint firstItem="gMZ-I5-gYe" firstAttribute="centerX" secondItem="8bC-Xf-vdC" secondAttribute="centerX" id="c3e-hH-pRE"/>
<constraint firstItem="gMZ-I5-gYe" firstAttribute="centerY" secondItem="8bC-Xf-vdC" secondAttribute="centerY" id="nKh-xD-zV1"/>
</constraints>
</view>
<connections>
<outlet property="indicator" destination="gMZ-I5-gYe" id="pya-LZ-csF"/>
<outlet property="label" destination="xce-mO-TBx" id="8ka-Tc-7Am"/>
</connections>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>

View file

@ -0,0 +1,36 @@
//
// LayerVideoCompositionInstruction.swift
// LayerVideoCompositor
//
// Created by Sami Samhuri on 2016-09-05.
// Copyright © 2016 Guru Logic Inc. All rights reserved.
//
import Foundation
import AVFoundation
/// A single composition instruction consumed by `LayerVideoCompositor`.
/// Describes one source video track, the time range it covers, an optional
/// overlay layer to composite on top of the frames, and the transform and
/// output size to render with.
final class LayerVideoCompositionInstruction: NSObject, AVVideoCompositionInstructionProtocol {
    // MARK: Fixed protocol requirements (same for every instruction)

    let enablePostProcessing: Bool = true
    let containsTweening: Bool = false
    let passthroughTrackID: CMPersistentTrackID = kCMPersistentTrackID_Invalid

    // MARK: Per-instruction values

    let timeRange: CMTimeRange
    let requiredSourceTrackIDs: [NSValue]?
    let videoTrackID: CMPersistentTrackID
    let targetSize: CGSize
    let transform: CGAffineTransform
    let overlayLayer: CALayer?

    /// - Parameters:
    ///   - track: The source video track whose frames are composited.
    ///   - timeRange: The portion of the timeline this instruction applies to.
    ///   - overlayLayer: Optional layer drawn on top of each frame. When
    ///     non-nil its bounds must already equal `targetSize`.
    ///   - transform: Affine transform applied to each source frame.
    ///   - targetSize: The output render size.
    init(track: AVAssetTrack, timeRange: CMTimeRange, overlayLayer: CALayer?, transform: CGAffineTransform, targetSize: CGSize) {
        // Debug-only sanity check: a supplied overlay must match the output size.
        assert(overlayLayer.map { $0.bounds.size == targetSize } ?? true)
        let trackID = track.trackID
        self.videoTrackID = trackID
        self.requiredSourceTrackIDs = [NSNumber(value: trackID)]
        self.timeRange = timeRange
        self.overlayLayer = overlayLayer
        self.transform = transform
        self.targetSize = targetSize
        super.init()
    }
}

View file

@ -0,0 +1,122 @@
//
// LayerVideoCompositor.swift
// LayerVideoCompositor
//
// Created by Sami Samhuri on 2016-09-05.
// Copyright © 2016 Guru Logic Inc. All rights reserved.
//
import Foundation
import Dispatch
import AVFoundation
import CoreImage
enum LayerVideoCompositingError: Error {
case invalidRequest
case sourceFrameBuffer
case overlayTextLayer
}
// Custom AVVideoCompositing implementation that composites an optional CALayer
// snapshot on top of each source video frame using Core Image.
final class LayerVideoCompositor: NSObject, AVVideoCompositing {
// Serial queue on which all render requests are processed.
private let queue = DispatchQueue(label: "ca.gurulogic.layer-video-compositor.render", qos: .default)
private var renderContext: AVVideoCompositionRenderContext = AVVideoCompositionRenderContext()
// Set on the caller's thread in cancelAllPendingVideoCompositionRequests(),
// read inside `queue` in startRequest(_:).
// NOTE(review): the write happens off-queue while reads happen on-queue —
// looks like a benign flag race, but confirm this is intended.
private var cancelled: Bool = false
// Prefers an OpenGL ES 3 backed Core Image context, falls back to ES2,
// and finally to a default (software/whatever-is-available) context.
private let ciContext: CIContext = {
if let eaglContext = EAGLContext(api: .openGLES3) ?? EAGLContext(api: .openGLES2) {
return CIContext(eaglContext: eaglContext)
}
return CIContext()
}()
// One-shot cache of the rendered overlay layer. Assumes the overlay is static
// for the entire composition — the first snapshot is reused for every frame.
// TODO(review): confirm no caller mutates the overlay layer between frames.
private var cachedOverlaySnapshot: CGImage?
private let colorSpace = CGColorSpaceCreateDeviceRGB()
var supportsWideColorSourceFrames: Bool {
return false
}
// BGRA is used for both source and output buffers, with GLES compatibility
// so the EAGL-backed CIContext can render into them.
private static let pixelFormat = kCVPixelFormatType_32BGRA
let sourcePixelBufferAttributes: [String : Any]? = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: LayerVideoCompositor.pixelFormat),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
]
let requiredPixelBufferAttributesForRenderContext: [String : Any] = [
kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: LayerVideoCompositor.pixelFormat),
kCVPixelBufferOpenGLESCompatibilityKey as String : NSNumber(value: true),
]
// AVVideoCompositing: adopt the new render context for subsequent frames.
func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
renderContext = newRenderContext
}
// AVVideoCompositing: render one frame asynchronously on the serial queue,
// finishing the request with either a composed buffer or an error.
func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
queue.async {
guard !self.cancelled else {
request.finishCancelledRequest()
return
}
do {
let renderedBuffer = try self.renderFrame(forRequest: request)
request.finish(withComposedVideoFrame: renderedBuffer)
}
catch {
request.finish(with: error)
}
}
}
// AVVideoCompositing: flag in-flight requests as cancelled, then clear the
// flag once all currently queued work has drained. (On a serial queue the
// .barrier flag is effectively a plain async.)
func cancelAllPendingVideoCompositionRequests() {
cancelled = true
queue.async(flags: .barrier) {
self.cancelled = false
}
}
// Renders `layer` into a CGImage once and caches it for all later frames.
// Side effect: sets isGeometryFlipped on the caller's layer so Core
// Animation's top-left origin matches the bitmap context.
private func overlaySnapshot(layer: CALayer) throws -> CGImage {
if let cachedSnapshot = cachedOverlaySnapshot {
return cachedSnapshot
}
layer.isGeometryFlipped = true
let size = layer.bounds.size
let w = Int(size.width)
let h = Int(size.height)
// NOTE(review): throwing a bare NSError() gives callers no domain/code;
// consider a dedicated LayerVideoCompositingError case.
guard let context = CGContext(data: nil, width: w, height: h, bitsPerComponent: 8, bytesPerRow: 4 * w, space: colorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) else { throw NSError() }
layer.render(in: context)
guard let snapshot = context.makeImage() else { throw NSError() }
cachedOverlaySnapshot = snapshot
return snapshot
}
// Composes one output frame: source frame (transformed) underneath, overlay
// snapshot on top via CISourceAtopCompositing.
private func renderFrame(forRequest request: AVAsynchronousVideoCompositionRequest) throws -> CVPixelBuffer {
return try autoreleasepool {
guard let instruction = request.videoCompositionInstruction as? LayerVideoCompositionInstruction else {
throw LayerVideoCompositingError.invalidRequest
}
guard let videoFrameBuffer = request.sourceFrame(byTrackID: instruction.videoTrackID) else {
// Try to be resilient in the face of errors. If we can't even generate a blank frame then fail.
if let blankBuffer = renderContext.newPixelBuffer() {
return blankBuffer
}
else {
throw LayerVideoCompositingError.sourceFrameBuffer
}
}
let frameImage = CIImage(cvPixelBuffer: videoFrameBuffer).applying(instruction.transform)
// NOTE(review): an instruction with a nil overlayLayer throws here instead
// of passing the plain frame through — confirm that is the intended contract.
guard let layer = instruction.overlayLayer, let overlayImage = try? CIImage(cgImage: overlaySnapshot(layer: layer)),
let composeFilter = CIFilter(name: "CISourceAtopCompositing") else {
throw LayerVideoCompositingError.overlayTextLayer
}
composeFilter.setValue(frameImage, forKey: kCIInputBackgroundImageKey)
composeFilter.setValue(overlayImage, forKey: kCIInputImageKey)
guard let outputImage = composeFilter.outputImage,
let renderedBuffer = renderContext.newPixelBuffer() else {
throw LayerVideoCompositingError.overlayTextLayer
}
ciContext.render(outputImage, to: renderedBuffer, bounds: outputImage.extent, colorSpace: self.colorSpace)
return renderedBuffer
}
}
}

View file

@ -7,19 +7,97 @@
//
import UIKit
import AVFoundation
class ViewController: UIViewController {
@IBOutlet private var label: UILabel?
@IBOutlet private var indicator: UIActivityIndicatorView?
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
let path = Bundle.main.path(forResource: "video.mov", ofType: nil)
let url = URL(fileURLWithPath: path!)
let start = Date()
overlayTextOnVideo(videoURL: url) { maybeURL in
DispatchQueue.main.async {
self.indicator?.stopAnimating()
guard let url = maybeURL else {
self.label?.text = "Error. See console for details."
return
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
let end = Date()
let duration = end.timeIntervalSince1970 - start.timeIntervalSince1970
print("Exported in \(duration) seconds.")
self.label?.text = "Done. Video is in the Documents folder which you can access with iTunes, or an app like iMazing or iExplorer."
let player = AVPlayer(url: url)
let layer = AVPlayerLayer(player: player)
let y = 16 + (self.label?.frame.maxY ?? 0)
let width = self.view.bounds.width
layer.frame = CGRect(x: 0, y: y, width: width, height: 9 / 16 * width)
self.view.layer.addSublayer(layer)
player.play()
}
}
}
/// Builds a full-frame overlay layer containing a large text caption
/// inset 16pt from the top-left corner.
private func newOverlayLayer(size: CGSize, text: String) -> CALayer {
    let inset: CGFloat = 16
    let captionHeight: CGFloat = 120

    // Semi-transparent white caption with a soft black shadow.
    let caption = CATextLayer()
    caption.string = text
    caption.fontSize = 96
    caption.alignmentMode = kCAAlignmentCenter
    caption.foregroundColor = UIColor(white: 1, alpha: 0.7).cgColor
    caption.shadowColor = UIColor.black.cgColor
    caption.shadowOpacity = 0.8
    caption.frame = CGRect(x: inset, y: inset, width: size.width - 2 * inset, height: captionHeight)

    // Container sized to the full video frame; the compositor snapshots it.
    let container = CALayer()
    container.frame = CGRect(origin: .zero, size: size)
    container.addSublayer(caption)
    return container
}
/// Exports the video at `videoURL` with a text overlay composited onto every
/// frame, writing the result to Documents/export.mov.
/// - Parameters:
///   - videoURL: File URL of the source movie.
///   - completion: Receives the output URL on success, nil on failure. May be
///     invoked on a background queue; callers must hop to main for UI work.
private func overlayTextOnVideo(videoURL: URL, completion: @escaping (URL?) -> Void) {
    let asset = AVURLAsset(url: videoURL)
    guard let sourceVideoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
        print("error: asset has no video tracks")
        completion(nil)
        return
    }

    // One instruction covering the whole asset, rendered by our custom compositor.
    let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
    let videoComposition = AVMutableVideoComposition(propertiesOf: asset)
    videoComposition.customVideoCompositorClass = LayerVideoCompositor.self
    let overlayLayer = newOverlayLayer(size: sourceVideoTrack.naturalSize, text: "Layeriffic!")
    let instruction = LayerVideoCompositionInstruction(track: sourceVideoTrack, timeRange: timeRange, overlayLayer: overlayLayer, transform: sourceVideoTrack.preferredTransform, targetSize: sourceVideoTrack.naturalSize)
    videoComposition.instructions = [instruction]

    // Write to Documents/export.mov, replacing any previous export.
    let documentDir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first!
    let outputURL = URL(fileURLWithPath: documentDir).appendingPathComponent("export.mov")
    _ = try? FileManager.default.removeItem(at: outputURL)

    // NOTE(review): taking the first compatible preset is arbitrary; consider
    // an explicit preset such as AVAssetExportPresetHighestQuality.
    guard let presetName = AVAssetExportSession.exportPresets(compatibleWith: asset).first,
        let exportSession = AVAssetExportSession(asset: asset, presetName: presetName) else {
        print("failed to create asset export session")
        completion(nil)
        return
    }
    exportSession.videoComposition = videoComposition
    // Fix: use a QuickTime container to match the ".mov" extension — the
    // original wrote an MPEG-4 container (AVFileTypeMPEG4) into export.mov.
    exportSession.outputFileType = AVFileTypeQuickTimeMovie
    exportSession.outputURL = outputURL
    exportSession.exportAsynchronously {
        guard exportSession.status == .completed else {
            // String(describing:) avoids "Optional(...)" noise when error is nil.
            print("export failed: \(String(describing: exportSession.error))")
            completion(nil)
            return
        }
        completion(outputURL)
    }
}
}

BIN
LayerVideoCompositor/video.mov Executable file

Binary file not shown.