Add a basic exporter

This doesn't have a great API yet and hasn't been tested so it may not
even work.
This commit is contained in:
Sami Samhuri 2024-07-03 22:30:19 -07:00
parent 6d464ff2b8
commit 36f055d36f
No known key found for this signature in database
3 changed files with 323 additions and 6 deletions

View file

@ -7,10 +7,12 @@
objects = {
/* Begin PBXBuildFile section */
7B7AE3092C36615700DB7391 /* SampleWriter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7AE3082C36615700DB7391 /* SampleWriter.swift */; };
7B9BC00E2C305D2C00C160C2 /* SJSAssetExportSession.docc in Sources */ = {isa = PBXBuildFile; fileRef = 7B9BC00D2C305D2C00C160C2 /* SJSAssetExportSession.docc */; };
7B9BC0142C305D2C00C160C2 /* SJSAssetExportSession.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 7B9BC0092C305D2C00C160C2 /* SJSAssetExportSession.framework */; };
7B9BC0192C305D2C00C160C2 /* SJSAssetExportSessionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B9BC0182C305D2C00C160C2 /* SJSAssetExportSessionTests.swift */; };
7B9BC01A2C305D2C00C160C2 /* SJSAssetExportSession.h in Headers */ = {isa = PBXBuildFile; fileRef = 7B9BC00C2C305D2C00C160C2 /* SJSAssetExportSession.h */; settings = {ATTRIBUTES = (Public, ); }; };
7B9BC0282C30612C00C160C2 /* ExportSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B9BC0272C30612C00C160C2 /* ExportSession.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@ -24,11 +26,13 @@
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
7B7AE3082C36615700DB7391 /* SampleWriter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleWriter.swift; sourceTree = "<group>"; };
7B9BC0092C305D2C00C160C2 /* SJSAssetExportSession.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = SJSAssetExportSession.framework; sourceTree = BUILT_PRODUCTS_DIR; };
7B9BC00C2C305D2C00C160C2 /* SJSAssetExportSession.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SJSAssetExportSession.h; sourceTree = "<group>"; };
7B9BC00D2C305D2C00C160C2 /* SJSAssetExportSession.docc */ = {isa = PBXFileReference; lastKnownFileType = folder.documentationcatalog; path = SJSAssetExportSession.docc; sourceTree = "<group>"; };
7B9BC0132C305D2C00C160C2 /* SJSAssetExportSessionTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SJSAssetExportSessionTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
7B9BC0182C305D2C00C160C2 /* SJSAssetExportSessionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SJSAssetExportSessionTests.swift; sourceTree = "<group>"; };
7B9BC0272C30612C00C160C2 /* ExportSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExportSession.swift; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@ -73,6 +77,8 @@
children = (
7B9BC00C2C305D2C00C160C2 /* SJSAssetExportSession.h */,
7B9BC00D2C305D2C00C160C2 /* SJSAssetExportSession.docc */,
7B9BC0272C30612C00C160C2 /* ExportSession.swift */,
7B7AE3082C36615700DB7391 /* SampleWriter.swift */,
);
path = SJSAssetExportSession;
sourceTree = "<group>";
@ -194,6 +200,8 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7B7AE3092C36615700DB7391 /* SampleWriter.swift in Sources */,
7B9BC0282C30612C00C160C2 /* ExportSession.swift in Sources */,
7B9BC00E2C305D2C00C160C2 /* SJSAssetExportSession.docc in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
@ -276,6 +284,7 @@
ONLY_ACTIVE_ARCH = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 6.0;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@ -332,6 +341,7 @@
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_VERSION = 6.0;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
@ -362,7 +372,7 @@
"@executable_path/../Frameworks",
"@loader_path/Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 14.5;
MACOSX_DEPLOYMENT_TARGET = 15.0;
MARKETING_VERSION = 1.0;
MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++";
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20";
@ -373,7 +383,6 @@
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_INSTALL_OBJC_HEADER = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.0;
};
@ -404,7 +413,7 @@
"@executable_path/../Frameworks",
"@loader_path/Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 14.5;
MACOSX_DEPLOYMENT_TARGET = 15.0;
MARKETING_VERSION = 1.0;
MODULE_VERIFIER_SUPPORTED_LANGUAGES = "objective-c objective-c++";
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 gnu++20";
@ -415,7 +424,6 @@
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_INSTALL_OBJC_HEADER = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.0;
};
@ -437,7 +445,6 @@
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.0;
};
@ -459,7 +466,6 @@
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.0;
};

View file

@ -0,0 +1,52 @@
//
// ExportSession.swift
// SJSAssetExportSession
//
// Created by Sami Samhuri on 2024-06-29.
//
public import AVFoundation
/// Front-end API for exporting an `AVAsset` to a file.
///
/// Configuration is passed per-call to ``export(asset:audioMix:audioOutputSettings:videoComposition:videoOutputSettings:timeRange:optimizeForNetworkUse:to:as:)``,
/// which constructs a `SampleWriter` and drives it to completion.
public final class ExportSession {
    /// Errors surfaced by an export, keyed by the phase that failed.
    public enum Error: LocalizedError {
        /// Configuration was rejected before any samples were processed.
        case setupFailure(reason: String)
        /// The asset reader failed; carries the reader's underlying error, if any.
        case readFailure((any Swift.Error)?)
        /// The asset writer failed; carries the writer's underlying error, if any.
        case writeFailure((any Swift.Error)?)

        public var errorDescription: String? {
            switch self {
            case .setupFailure(let reason):
                return reason
            case .readFailure(let underlyingError):
                return underlyingError?.localizedDescription ?? "Unknown read failure"
            case .writeFailure(let underlyingError):
                return underlyingError?.localizedDescription ?? "Unknown write failure"
            }
        }
    }

    // Without an explicit public initializer a public class cannot be
    // instantiated from outside this framework, making the API unusable to
    // clients. The type is currently stateless, so a trivial init suffices.
    public init() {}

    /// Exports `asset` to `outputURL` using the given settings.
    ///
    /// - Parameters:
    ///   - asset: The asset to export; ownership is transferred to the writer.
    ///   - audioMix: Optional mix applied to the audio tracks.
    ///   - audioOutputSettings: `AVAssetWriterInput` settings for audio.
    ///   - videoComposition: Optional composition applied to the video tracks.
    ///   - videoOutputSettings: `AVAssetWriterInput` settings for video.
    ///   - timeRange: Portion of the asset to export; `nil` means the whole asset.
    ///   - optimizeForNetworkUse: Forwarded to `AVAssetWriter.shouldOptimizeForNetworkUse`.
    ///   - outputURL: Destination file URL.
    ///   - fileType: Container format for the output file.
    /// - Throws: ``Error`` for setup/read/write failures, or `CancellationError`.
    public func export(
        asset: sending AVAsset,
        audioMix: sending AVAudioMix?,
        audioOutputSettings: [String: (any Sendable)],
        videoComposition: sending AVVideoComposition?,
        videoOutputSettings: [String: (any Sendable)],
        timeRange: CMTimeRange? = nil,
        optimizeForNetworkUse: Bool = false,
        to outputURL: URL,
        as fileType: AVFileType
    ) async throws {
        // A nil range means "everything": start at zero with unbounded duration.
        let effectiveTimeRange = timeRange ?? CMTimeRange(start: .zero, duration: .positiveInfinity)
        let sampleWriter = try await SampleWriter(
            asset: asset,
            timeRange: effectiveTimeRange,
            audioMix: audioMix,
            audioOutputSettings: audioOutputSettings,
            videoComposition: videoComposition,
            videoOutputSettings: videoOutputSettings,
            optimizeForNetworkUse: optimizeForNetworkUse,
            outputURL: outputURL,
            fileType: fileType
        )
        try await sampleWriter.writeSamples()
    }
}

View file

@ -0,0 +1,259 @@
//
// SampleWriter.swift
// SJSAssetExportSession
//
// Created by Sami Samhuri on 2024-07-03.
//
import AVFoundation.AVAsset
// Concurrency shim: re-exposes `loadTracks(withMediaType:)` with a `sending`
// result so the returned tracks can be transferred into another isolation
// domain (the `SampleWriter` actor) under strict concurrency checking.
// NOTE(review): presumably needed because `AVAssetTrack` is not `Sendable` —
// confirm against Swift 6 region-based isolation rules.
private extension AVAsset {
    // Behaves exactly like `loadTracks(withMediaType:)`; only the `sending`
    // annotation on the return differs.
    func sendTracks(withMediaType mediaType: AVMediaType) async throws -> sending [AVAssetTrack] {
        try await loadTracks(withMediaType: mediaType)
    }
}
/// Drains samples from an `AVAssetReader` and feeds them to an `AVAssetWriter`,
/// doing the actual transcoding work on behalf of `ExportSession`.
///
/// The actor uses a custom serial-queue executor. The same queue is handed to
/// `requestMediaDataWhenReady(on:)`, so those callbacks run on the actor's
/// executor and may re-enter isolation synchronously via `assumeIsolated`.
actor SampleWriter {
    private let queue = DispatchSerialQueue(
        label: "SJSAssetExportSession.SampleWriter",
        autoreleaseFrequency: .workItem,
        target: .global()
    )

    /// Custom executor backing this actor; must be the queue used for
    /// `requestMediaDataWhenReady(on:)` for `assumeIsolated` to be valid.
    public nonisolated var unownedExecutor: UnownedSerialExecutor {
        queue.asUnownedSerialExecutor()
    }

    let audioTracks: [AVAssetTrack]
    let audioMix: AVAudioMix?
    let audioOutputSettings: [String: (any Sendable)]
    let videoTracks: [AVAssetTrack]
    let videoComposition: AVVideoComposition?
    let videoOutputSettings: [String: (any Sendable)]
    let reader: AVAssetReader
    let writer: AVAssetWriter
    let duration: CMTime
    let timeRange: CMTimeRange

    // Reader outputs / writer inputs; created lazily in encodeAudioTracks(_:)
    // and encodeVideoTracks(_:).
    private var audioOutput: AVAssetReaderAudioMixOutput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVAssetReaderVideoCompositionOutput?
    private var videoInput: AVAssetWriterInput?
    private var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?

    /// Loads tracks and constructs the reader/writer pair.
    ///
    /// - Throws: `ExportSession.Error.setupFailure` when the video output
    ///   settings are rejected, plus any error from `AVAssetReader`/`AVAssetWriter`
    ///   construction or track loading.
    init(
        asset: sending AVAsset,
        timeRange: CMTimeRange,
        audioMix: AVAudioMix?,
        audioOutputSettings: sending [String: (any Sendable)],
        videoComposition: AVVideoComposition?,
        videoOutputSettings: sending [String: (any Sendable)],
        optimizeForNetworkUse: Bool,
        outputURL: URL,
        fileType: AVFileType
    ) async throws {
        // Prefer a finite caller-supplied duration; otherwise fall back to the
        // asset's own duration so progress has a real denominator.
        let duration =
            if timeRange.duration.isValid && !timeRange.duration.isPositiveInfinity {
                timeRange.duration
            } else {
                try await asset.load(.duration)
            }
        let reader = try AVAssetReader(asset: asset)
        reader.timeRange = timeRange
        let writer = try AVAssetWriter(outputURL: outputURL, fileType: fileType)
        writer.shouldOptimizeForNetworkUse = optimizeForNetworkUse
        guard writer.canApply(outputSettings: videoOutputSettings, forMediaType: .video) else {
            throw ExportSession.Error.setupFailure(reason: "Cannot apply video output settings")
        }
        self.audioTracks = try await asset.sendTracks(withMediaType: .audio)
        self.audioMix = audioMix
        self.audioOutputSettings = audioOutputSettings
        self.videoTracks = try await asset.sendTracks(withMediaType: .video)
        self.videoComposition = videoComposition
        self.videoOutputSettings = videoOutputSettings
        self.reader = reader
        self.writer = writer
        self.duration = duration
        self.timeRange = timeRange
    }

    /// Runs the export: encodes audio and video concurrently, then finishes or
    /// propagates the first failure.
    func writeSamples() async throws {
        // startWriting()/startReading() return false on failure. Ignoring that
        // and calling startSession(atSourceTime:) on a failed writer raises an
        // exception, so surface the underlying error instead.
        guard writer.startWriting() else {
            throw ExportSession.Error.writeFailure(writer.error)
        }
        guard reader.startReading() else {
            writer.cancelWriting()
            throw ExportSession.Error.readFailure(reader.error)
        }
        writer.startSession(atSourceTime: timeRange.start)
        async let audioResult = try encodeAudioTracks(audioTracks)
        async let videoResult = try encodeVideoTracks(videoTracks)
        _ = try await (audioResult, videoResult)
        if reader.status == .cancelled || writer.status == .cancelled {
            throw CancellationError()
        } else if writer.status == .failed {
            reader.cancelReading()
            throw ExportSession.Error.writeFailure(writer.error)
        } else if reader.status == .failed {
            writer.cancelWriting()
            throw ExportSession.Error.readFailure(reader.error)
        } else {
            await withCheckedContinuation { continuation in
                writer.finishWriting {
                    continuation.resume(returning: ())
                }
            }
        }
    }

    /// Sets up the audio reader output and writer input, then pumps samples
    /// until the audio stream is exhausted or something fails.
    ///
    /// - Returns: `false` immediately when the asset has no audio tracks,
    ///   `true` once pumping has stopped.
    private func encodeAudioTracks(_ audioTracks: [AVAssetTrack]) async throws -> Bool {
        guard !audioTracks.isEmpty else { return false }
        let audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
        // Apply the caller's mix; previously the stored audioMix was never
        // handed to the output, so any supplied mix was silently ignored.
        audioOutput.audioMix = audioMix
        guard reader.canAdd(audioOutput) else {
            throw ExportSession.Error.setupFailure(reason: "Can't add audio output to reader")
        }
        reader.add(audioOutput)
        self.audioOutput = audioOutput
        let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        guard writer.canAdd(audioInput) else {
            throw ExportSession.Error.setupFailure(reason: "Can't add audio input to writer")
        }
        writer.add(audioInput)
        self.audioInput = audioInput
        return await withCheckedContinuation { continuation in
            // The callback runs on `queue`, i.e. this actor's executor, so
            // assumeIsolated is safe. writeReadyAudioSamples() guarantees the
            // input is marked finished before it returns false, which stops
            // further callbacks and ensures the continuation resumes only once.
            self.audioInput?.requestMediaDataWhenReady(on: queue) {
                let hasMoreSamples = self.assumeIsolated { $0.writeReadyAudioSamples() }
                if !hasMoreSamples {
                    continuation.resume(returning: true)
                }
            }
        }
    }

    /// Appends buffered audio samples while the writer input can accept them.
    ///
    /// - Returns: `true` when more samples remain, `false` when pumping is done
    ///   (stream exhausted or a failure occurred). Every `false` path marks the
    ///   input finished so the ready-callback will not fire again.
    private func writeReadyAudioSamples() -> Bool {
        guard let audioOutput, let audioInput else { return true }
        while audioInput.isReadyForMoreMediaData {
            guard reader.status == .reading && writer.status == .writing,
                  let sampleBuffer = audioOutput.copyNextSampleBuffer() else {
                audioInput.markAsFinished()
                NSLog("Finished encoding ready audio samples from \(audioOutput)")
                return false
            }
            guard audioInput.append(sampleBuffer) else {
                NSLog("Failed to append audio sample buffer \(sampleBuffer) to input \(audioInput)")
                // Mark finished so requestMediaDataWhenReady stops calling us;
                // otherwise the continuation above could be resumed twice.
                audioInput.markAsFinished()
                return false
            }
        }
        // Everything was appended successfully, return true indicating there's more to do.
        NSLog("Completed encoding ready audio samples, more to come...")
        return true
    }

    /// Sets up the video reader output, writer input, and pixel-buffer adaptor,
    /// then pumps samples until the video stream is exhausted or something fails.
    ///
    /// - Returns: `false` immediately when the asset has no video tracks,
    ///   `true` once pumping has stopped.
    private func encodeVideoTracks(_ videoTracks: [AVAssetTrack]) async throws -> Bool {
        guard !videoTracks.isEmpty else { return false }
        // Dimensions come from the composition's renderSize when present,
        // falling back to the explicit width/height output settings.
        guard let width = videoComposition.map({ Int($0.renderSize.width) })
            ?? (videoOutputSettings[AVVideoWidthKey] as? NSNumber)?.intValue,
            let height = videoComposition.map({ Int($0.renderSize.height) })
            ?? (videoOutputSettings[AVVideoHeightKey] as? NSNumber)?.intValue else {
            throw ExportSession.Error.setupFailure(reason: "Export dimensions must be provided in a video composition or video output settings")
        }
        let videoOutput = AVAssetReaderVideoCompositionOutput(videoTracks: videoTracks, videoSettings: nil)
        videoOutput.alwaysCopiesSampleData = false
        videoOutput.videoComposition = videoComposition
        guard reader.canAdd(videoOutput) else {
            throw ExportSession.Error.setupFailure(reason: "Can't add video output to reader")
        }
        reader.add(videoOutput)
        self.videoOutput = videoOutput
        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        guard writer.canAdd(videoInput) else {
            throw ExportSession.Error.setupFailure(reason: "Can't add video input to writer")
        }
        writer.add(videoInput)
        self.videoInput = videoInput
        // NOTE(review): RGBA + the legacy OpenGLES IOSurface keys look copied
        // from older sample code; most hardware pipelines expect 32BGRA —
        // confirm these attributes against the target devices.
        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(integerLiteral: Int(kCVPixelFormatType_32RGBA)),
            kCVPixelBufferWidthKey as String: NSNumber(integerLiteral: width),
            kCVPixelBufferHeightKey as String: NSNumber(integerLiteral: height),
            "IOSurfaceOpenGLESTextureCompatibility": NSNumber(booleanLiteral: true),
            "IOSurfaceOpenGLESFBOCompatibility": NSNumber(booleanLiteral: true),
        ]
        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: videoInput,
            sourcePixelBufferAttributes: pixelBufferAttributes
        )
        return await withCheckedContinuation { continuation in
            // Same single-resume contract as the audio path: every false
            // return from writeReadyVideoSamples() has marked the input
            // finished, so no further callbacks arrive after resuming.
            self.videoInput?.requestMediaDataWhenReady(on: queue) {
                let hasMoreSamples = self.assumeIsolated { $0.writeReadyVideoSamples() }
                if !hasMoreSamples {
                    continuation.resume(returning: true)
                }
            }
        }
    }

    /// Appends buffered video samples while the writer input can accept them.
    ///
    /// - Returns: `true` when more samples remain, `false` when pumping is done
    ///   (stream exhausted or a failure occurred). Every `false` path marks the
    ///   input finished so the ready-callback will not fire again.
    private func writeReadyVideoSamples() -> Bool {
        guard let videoOutput, let videoInput, let pixelBufferAdaptor else { return true }
        while videoInput.isReadyForMoreMediaData {
            guard reader.status == .reading && writer.status == .writing,
                  let sampleBuffer = videoOutput.copyNextSampleBuffer() else {
                videoInput.markAsFinished()
                NSLog("Finished encoding ready video samples from \(videoOutput)")
                return false
            }
            // Rebase the timestamp so progress is measured from the start of
            // the exported range.
            let samplePresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - timeRange.start
            let progress = Float(samplePresentationTime.seconds / duration.seconds)
            #warning("TODO: publish progress to an AsyncStream")
            guard let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool else {
                NSLog("No pixel buffer pool available on adaptor \(pixelBufferAdaptor)")
                videoInput.markAsFinished()
                return false
            }
            // NOTE(review): the pooled buffer is appended without anything
            // being rendered into it — as written this would emit blank
            // frames. Presumably a render step belongs here; confirm intent.
            var toRenderBuffer: CVPixelBuffer?
            let result = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &toRenderBuffer)
            var handled = false
            if result == kCVReturnSuccess, let toBuffer = toRenderBuffer {
                handled = pixelBufferAdaptor.append(toBuffer, withPresentationTime: samplePresentationTime)
                if !handled {
                    // Stop callbacks before resuming the continuation.
                    videoInput.markAsFinished()
                    return false
                }
            }
            if !handled {
                #warning("is this really necessary?! seems like a failure scenario...")
                guard videoInput.append(sampleBuffer) else {
                    NSLog("Failed to append video sample buffer \(sampleBuffer) to input \(videoInput)")
                    videoInput.markAsFinished()
                    return false
                }
            }
        }
        // Everything was appended successfully, return true indicating there's more to do.
        NSLog("Completed encoding ready video samples, more to come...")
        return true
    }
}