Add a nicer API on top of settings dictionaries

This commit is contained in:
Sami Samhuri 2024-07-07 23:43:19 -07:00
parent c1a6555c22
commit f49cc722d4
No known key found for this signature in database
8 changed files with 436 additions and 153 deletions

View file

@ -17,6 +17,9 @@
7BC5FC792C3B90F70090B757 /* AutoDestructingURL.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC782C3B90F70090B757 /* AutoDestructingURL.swift */; };
7BC5FC8A2C3BAA150090B757 /* ExportSession+Error.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC892C3BAA150090B757 /* ExportSession+Error.swift */; };
7BC5FC8C2C3BB0180090B757 /* AVAsset+sending.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC8B2C3BB0180090B757 /* AVAsset+sending.swift */; };
7BC5FC902C3BB2030090B757 /* AudioOutputSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC8F2C3BB2030090B757 /* AudioOutputSettings.swift */; };
7BC5FC922C3BB4BD0090B757 /* VideoOutputSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC912C3BB4BD0090B757 /* VideoOutputSettings.swift */; };
7BC5FC942C3BC3AD0090B757 /* CMTime+seconds.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BC5FC932C3BC3AD0090B757 /* CMTime+seconds.swift */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@ -41,6 +44,9 @@
7BC5FC782C3B90F70090B757 /* AutoDestructingURL.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AutoDestructingURL.swift; sourceTree = "<group>"; };
7BC5FC892C3BAA150090B757 /* ExportSession+Error.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ExportSession+Error.swift"; sourceTree = "<group>"; };
7BC5FC8B2C3BB0180090B757 /* AVAsset+sending.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAsset+sending.swift"; sourceTree = "<group>"; };
7BC5FC8F2C3BB2030090B757 /* AudioOutputSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioOutputSettings.swift; sourceTree = "<group>"; };
7BC5FC912C3BB4BD0090B757 /* VideoOutputSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoOutputSettings.swift; sourceTree = "<group>"; };
7BC5FC932C3BC3AD0090B757 /* CMTime+seconds.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMTime+seconds.swift"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
@ -87,7 +93,10 @@
7B9BC00B2C305D2C00C160C2 /* SJSAssetExportSession */ = {
isa = PBXGroup;
children = (
7BC5FC8F2C3BB2030090B757 /* AudioOutputSettings.swift */,
7BC5FC912C3BB4BD0090B757 /* VideoOutputSettings.swift */,
7BC5FC8B2C3BB0180090B757 /* AVAsset+sending.swift */,
7BC5FC932C3BC3AD0090B757 /* CMTime+seconds.swift */,
7B9BC0272C30612C00C160C2 /* ExportSession.swift */,
7BC5FC892C3BAA150090B757 /* ExportSession+Error.swift */,
7B7AE3082C36615700DB7391 /* SampleWriter.swift */,
@ -220,9 +229,12 @@
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
7BC5FC942C3BC3AD0090B757 /* CMTime+seconds.swift in Sources */,
7B7AE3092C36615700DB7391 /* SampleWriter.swift in Sources */,
7BC5FC902C3BB2030090B757 /* AudioOutputSettings.swift in Sources */,
7B9BC0282C30612C00C160C2 /* ExportSession.swift in Sources */,
7BC5FC8C2C3BB0180090B757 /* AVAsset+sending.swift in Sources */,
7BC5FC922C3BB4BD0090B757 /* VideoOutputSettings.swift in Sources */,
7B9BC00E2C305D2C00C160C2 /* SJSAssetExportSession.docc in Sources */,
7BC5FC8A2C3BAA150090B757 /* ExportSession+Error.swift in Sources */,
);

View file

@ -0,0 +1,62 @@
//
// AudioOutputSettings.swift
// SJSAssetExportSession
//
// Created by Sami Samhuri on 2024-07-07.
//
public import AVFoundation
public struct AudioOutputSettings {
    /// Supported audio codecs for export.
    public enum Format {
        case aac
        case mp3

        /// The Core Audio identifier for this format.
        /// NOTE(review): AVAssetWriter may not support encoding to MP3 — confirm `.mp3` works end-to-end before relying on it.
        var formatID: AudioFormatID {
            switch self {
            case .aac: kAudioFormatMPEG4AAC
            case .mp3: kAudioFormatMPEGLayer3
            }
        }
    }

    let format: AudioFormatID
    let channels: Int
    let sampleRate: Int?
    let mix: AVAudioMix?

    /// Stereo AAC at 44.1 kHz — a sensible baseline for most exports.
    public static var `default`: AudioOutputSettings {
        .format(.aac).channels(2).sampleRate(44_100)
    }

    /// Entry point of the builder chain: selects the codec and seeds stereo
    /// output with no explicit sample rate and no audio mix.
    public static func format(_ format: Format) -> AudioOutputSettings {
        .init(format: format.formatID, channels: 2, sampleRate: nil, mix: nil)
    }

    /// Returns a copy with the channel count replaced.
    public func channels(_ channels: Int) -> AudioOutputSettings {
        .init(format: format, channels: channels, sampleRate: sampleRate, mix: mix)
    }

    /// Returns a copy with the sample rate replaced; pass `nil` to let AVFoundation choose.
    public func sampleRate(_ sampleRate: Int?) -> AudioOutputSettings {
        .init(format: format, channels: channels, sampleRate: sampleRate, mix: mix)
    }

    /// Returns a copy with the audio mix replaced.
    public func mix(_ mix: sending AVAudioMix?) -> AudioOutputSettings {
        .init(format: format, channels: channels, sampleRate: sampleRate, mix: mix)
    }

    /// The AVFoundation audio-settings dictionary equivalent to this value.
    /// `AVSampleRateKey` is included only when a sample rate was set.
    var settingsDictionary: [String: any Sendable] {
        var settings: [String: any Sendable] = [
            AVFormatIDKey: format,
            AVNumberOfChannelsKey: NSNumber(value: channels),
        ]
        if let sampleRate {
            settings[AVSampleRateKey] = NSNumber(value: Float(sampleRate))
        }
        return settings
    }
}

View file

@ -0,0 +1,14 @@
//
// CMTime+seconds.swift
// SJSAssetExportSession
//
// Created by Sami Samhuri on 2024-07-07.
//
public import CoreMedia
public extension CMTime {
    /// Creates a time from a number of seconds.
    ///
    /// - Parameters:
    ///   - seconds: The duration in seconds.
    ///   - preferredTimescale: The timescale used to represent the value.
    ///     Defaults to 600, a common multiple of typical frame rates
    ///     (24, 25, 30, 60) widely used with AVFoundation.
    /// - Returns: A `CMTime` representing `seconds` at `preferredTimescale`.
    static func seconds(_ seconds: TimeInterval, preferredTimescale: CMTimeScale = 600) -> CMTime {
        CMTime(seconds: seconds, preferredTimescale: preferredTimescale)
    }
}

View file

@ -15,7 +15,6 @@ extension ExportSession {
case cannotAddAudioOutput
case cannotAddVideoInput
case cannotAddVideoOutput
case videoSettingsEmpty
case videoSettingsInvalid
case videoTracksEmpty
@ -33,8 +32,6 @@ extension ExportSession {
"Can't add video input to writer"
case .cannotAddVideoOutput:
"Can't add video output to reader"
case .videoSettingsEmpty:
"Must provide video output settings"
case .videoSettingsInvalid:
"Invalid video output settings"
case .videoTracksEmpty:

View file

@ -7,7 +7,6 @@
public import AVFoundation
public final class ExportSession: @unchecked Sendable {
// @unchecked Sendable because progress properties are mutable, it's safe though.
@ -23,28 +22,59 @@ public final class ExportSession: @unchecked Sendable {
}
}
public func export(
asset: sending AVAsset,
optimizeForNetworkUse: Bool = false,
timeRange: CMTimeRange? = nil,
audio: sending AudioOutputSettings = .default,
video: sending VideoOutputSettings,
to outputURL: URL,
as fileType: AVFileType
) async throws {
let videoComposition = try await AVMutableVideoComposition.videoComposition(
withPropertiesOf: asset
).applyingSettings(video)
let sampleWriter = try await SampleWriter(
asset: asset,
audioOutputSettings: audio.settingsDictionary,
audioMix: audio.mix,
videoOutputSettings: video.settingsDictionary,
videoComposition: videoComposition,
timeRange: timeRange,
optimizeForNetworkUse: optimizeForNetworkUse,
outputURL: outputURL,
fileType: fileType
)
Task { [progressContinuation] in
for await progress in await sampleWriter.progressStream {
progressContinuation?.yield(progress)
}
}
try await sampleWriter.writeSamples()
}
/**
Exports the given asset using all of the other parameters to transform it in some way.
- Parameters:
- asset: The source asset to export. This can be any kind of `AVAsset` including subclasses such as `AVComposition`.
- audioMix: An optional mix that can be used to manipulate the audio in some way.
- audioOutputSettings: Audio settings using [audio settings keys from AVFoundation](https://developer.apple.com/documentation/avfoundation/audio_settings) and values must be suitable for consumption by Objective-C. Required keys are:
- `AVFormatIDKey` with the typical value `kAudioFormatMPEG4AAC`
- `AVNumberOfChannelsKey` with the typical value `NSNumber(value: 2)` or `AVChannelLayoutKey` with an instance of `AVAudioChannelLayout`
- videoComposition: Used to manipulate the video in some way. This can be used to scale the video, apply filters, amongst other edits.
- videoOutputSettings: Video settings using [video settings keys from AVFoundation](https://developer.apple.com/documentation/avfoundation/video_settings) and values must be suitable for consumption by Objective-C. Required keys are:
- `AVVideoCodecKey` with the typical value `AVVideoCodecType.h264.rawValue` or `AVVideoCodecType.hevc.rawValue`
- `AVVideoWidthKey` with an integer as an `NSNumber`
- `AVVideoHeightKey` with an integer as an `NSNumber`
- optimizeForNetworkUse: Setting this value to `true` writes the output file in a form that enables a player to begin playing the media after downloading only a small portion of it. Defaults to `false`.
- timeRange: Providing a time range exports a subset of the asset instead of the entire duration, which is the default behaviour.
- optimizeForNetworkUse: Setting this value to `true` writes the output file in a form that enables a player to begin playing the media after downloading only a small portion of it. Defaults to `false`.
- audioOutputSettings: Audio settings using [audio settings keys from AVFoundation](https://developer.apple.com/documentation/avfoundation/audio_settings) and values must be suitable for consumption by Objective-C. Required keys are:
- `AVFormatIDKey` with the typical value `kAudioFormatMPEG4AAC`
- `AVNumberOfChannelsKey` with the typical value `NSNumber(value: 2)` or `AVChannelLayoutKey` with an instance of `AVAudioChannelLayout` for use with more than 2 channels.
- mix: An optional mix that can be used to manipulate the audio in some way.
- videoOutputSettings: Video settings using [video settings keys from AVFoundation](https://developer.apple.com/documentation/avfoundation/video_settings) and values must be suitable for consumption by Objective-C. Required keys are:
- `AVVideoCodecKey` with the typical value `AVVideoCodecType.h264.rawValue` or `AVVideoCodecType.hevc.rawValue`
- `AVVideoWidthKey` with an integer as an `NSNumber`, optional when a video composition is given
- `AVVideoHeightKey` with an integer as an `NSNumber`, optional when a video composition is given
- composition: An optional composition that can be used to manipulate the video in some way. This can scale the video, apply filters, or ramp audio volume, amongst other edits.
- outputURL: The file URL where the exported video will be written.
@ -52,21 +82,40 @@ public final class ExportSession: @unchecked Sendable {
*/
public func export(
asset: sending AVAsset,
audioMix: sending AVAudioMix?,
audioOutputSettings: [String: (any Sendable)],
videoComposition: sending AVVideoComposition,
videoOutputSettings: [String: (any Sendable)],
timeRange: CMTimeRange? = nil,
optimizeForNetworkUse: Bool = false,
timeRange: CMTimeRange? = nil,
audioOutputSettings: [String: (any Sendable)],
mix: sending AVAudioMix? = nil,
videoOutputSettings: [String: (any Sendable)],
composition: sending AVVideoComposition? = nil,
to outputURL: URL,
as fileType: AVFileType
) async throws {
let videoComposition: AVVideoComposition =
if let composition { composition }
else if let width = (videoOutputSettings[AVVideoWidthKey] as? NSNumber)?.intValue,
let height = (videoOutputSettings[AVVideoHeightKey] as? NSNumber)?.intValue
{
try await AVMutableVideoComposition.videoComposition(
withPropertiesOf: asset
).applyingSettings(.codec(.h264, width: width, height: height))
} else {
try await AVMutableVideoComposition.videoComposition(
withPropertiesOf: asset
)
}
var videoOutputSettings = videoOutputSettings
if videoOutputSettings[AVVideoWidthKey] == nil || videoOutputSettings[AVVideoHeightKey] == nil {
let size = videoComposition.renderSize
videoOutputSettings[AVVideoWidthKey] = NSNumber(value: Int(size.width))
videoOutputSettings[AVVideoHeightKey] = NSNumber(value: Int(size.height))
}
let sampleWriter = try await SampleWriter(
asset: asset,
audioMix: audioMix,
audioOutputSettings: audioOutputSettings,
videoComposition: videoComposition,
audioMix: mix,
videoOutputSettings: videoOutputSettings,
videoComposition: videoComposition,
timeRange: timeRange,
optimizeForNetworkUse: optimizeForNetworkUse,
outputURL: outputURL,

View file

@ -32,10 +32,10 @@ actor SampleWriter {
}
private var progressContinuation: AsyncStream<Float>.Continuation?
private let audioMix: AVAudioMix?
private let audioOutputSettings: [String: (any Sendable)]
private let videoComposition: AVVideoComposition?
private let audioMix: AVAudioMix?
private let videoOutputSettings: [String: (any Sendable)]
private let videoComposition: AVVideoComposition?
private let reader: AVAssetReader
private let writer: AVAssetWriter
private let duration: CMTime
@ -47,15 +47,17 @@ actor SampleWriter {
init(
asset: sending AVAsset,
audioMix: AVAudioMix?,
audioOutputSettings: sending [String: (any Sendable)],
videoComposition: AVVideoComposition,
audioMix: AVAudioMix?,
videoOutputSettings: sending [String: (any Sendable)],
videoComposition: AVVideoComposition,
timeRange: CMTimeRange? = nil,
optimizeForNetworkUse: Bool = false,
outputURL: URL,
fileType: AVFileType
) async throws {
precondition(!videoOutputSettings.isEmpty)
let duration =
if let timeRange { timeRange.duration } else { try await asset.load(.duration) }
let reader = try AVAssetReader(asset: asset)
@ -69,15 +71,15 @@ actor SampleWriter {
try Self.validateAudio(tracks: audioTracks, outputSettings: audioOutputSettings, writer: writer)
let videoTracks = try await asset.sendTracks(withMediaType: .video)
try Self.validateVideo(tracks: videoTracks, outputSettings: videoOutputSettings, writer: writer)
Self.warnAboutMismatchedVideoDimensions(
Self.warnAboutMismatchedVideoSize(
renderSize: videoComposition.renderSize,
settings: videoOutputSettings
)
self.audioMix = audioMix
self.audioOutputSettings = audioOutputSettings
self.videoComposition = videoComposition
self.audioMix = audioMix
self.videoOutputSettings = videoOutputSettings
self.videoComposition = videoComposition
self.reader = reader
self.writer = writer
self.duration = duration
@ -250,13 +252,12 @@ actor SampleWriter {
writer: AVAssetWriter
) throws {
guard !tracks.isEmpty else { throw Error.setupFailure(.videoTracksEmpty) }
guard !outputSettings.isEmpty else { throw Error.setupFailure(.videoSettingsEmpty) }
guard writer.canApply(outputSettings: outputSettings, forMediaType: .video) else {
throw Error.setupFailure(.videoSettingsInvalid)
}
}
private static func warnAboutMismatchedVideoDimensions(
private static func warnAboutMismatchedVideoSize(
renderSize: CGSize,
settings: [String: any Sendable]
) {

View file

@ -0,0 +1,146 @@
//
// VideoOutputSettings.swift
// SJSAssetExportSession
//
// Created by Sami Samhuri on 2024-07-07.
//
internal import AVFoundation
public struct VideoOutputSettings {
    /// H.264 profile/level pairs, mapped onto AVFoundation's profile-level constants.
    public enum H264Profile {
        case baselineAuto, baseline30, baseline31, baseline41
        case mainAuto, main31, main32, main41
        case highAuto, high40, high41

        /// The `AVVideoProfileLevelKey` value for this profile/level.
        var level: String {
            switch self {
            case .baselineAuto: AVVideoProfileLevelH264BaselineAutoLevel
            case .baseline30: AVVideoProfileLevelH264Baseline30
            case .baseline31: AVVideoProfileLevelH264Baseline31
            case .baseline41: AVVideoProfileLevelH264Baseline41
            case .mainAuto: AVVideoProfileLevelH264MainAutoLevel
            case .main31: AVVideoProfileLevelH264Main31
            case .main32: AVVideoProfileLevelH264Main32
            case .main41: AVVideoProfileLevelH264Main41
            case .highAuto: AVVideoProfileLevelH264HighAutoLevel
            case .high40: AVVideoProfileLevelH264High40
            case .high41: AVVideoProfileLevelH264High41
            }
        }
    }

    /// Supported video codecs for export.
    public enum Codec {
        case h264(H264Profile)
        case hevc

        /// Shorthand for H.264 at the auto high profile level.
        static var h264: Codec {
            .h264(.highAuto)
        }

        /// The `AVVideoCodecKey` value for this codec.
        var stringValue: String {
            switch self {
            case .h264: AVVideoCodecType.h264.rawValue
            case .hevc: AVVideoCodecType.hevc.rawValue
            }
        }

        /// The `AVVideoProfileLevelKey` value, when the codec takes one.
        var profileLevel: String? {
            switch self {
            case let .h264(profile): profile.level
            case .hevc: nil
            }
        }
    }

    /// Output color space, expressed as AVFoundation color property dictionaries.
    public enum Color {
        case sdr, hdr

        /// The `AVVideoColorPropertiesKey` dictionary for this color space.
        var properties: [String: any Sendable] {
            switch self {
            case .sdr:
                [
                    AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
                    AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
                    AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2,
                ]
            case .hdr:
                [
                    AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_2020,
                    AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_2100_HLG,
                    AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_2020,
                ]
            }
        }
    }

    let codec: Codec
    let size: CGSize
    let fps: Int?
    let bitrate: Int?
    let color: Color?

    /// Entry point of the builder chain: selects codec and output size; frame
    /// rate, bitrate, and color space remain unset.
    public static func codec(_ codec: Codec, size: CGSize) -> VideoOutputSettings {
        .init(codec: codec, size: size, fps: nil, bitrate: nil, color: nil)
    }

    /// Convenience overload of `codec(_:size:)` taking integer dimensions.
    public static func codec(_ codec: Codec, width: Int, height: Int) -> VideoOutputSettings {
        .codec(codec, size: CGSize(width: width, height: height))
    }

    /// Returns a copy with the frame rate replaced; `nil` keeps the source timing.
    public func fps(_ fps: Int?) -> VideoOutputSettings {
        .init(codec: codec, size: size, fps: fps, bitrate: bitrate, color: color)
    }

    /// Returns a copy with the average bitrate replaced; `nil` lets the encoder choose.
    public func bitrate(_ bitrate: Int?) -> VideoOutputSettings {
        .init(codec: codec, size: size, fps: fps, bitrate: bitrate, color: color)
    }

    /// Returns a copy with the color space replaced; `nil` keeps the source color.
    public func color(_ color: Color?) -> VideoOutputSettings {
        .init(codec: codec, size: size, fps: fps, bitrate: bitrate, color: color)
    }

    /// The AVFoundation video-settings dictionary equivalent to this value.
    /// Compression properties are included only when at least one of
    /// profile level or bitrate is set; color properties only when set.
    var settingsDictionary: [String: any Sendable] {
        let maybeCompression: [String: (any Sendable)?] = [
            AVVideoProfileLevelKey: codec.profileLevel,
            AVVideoAverageBitRateKey: bitrate.map { NSNumber(value: $0) },
        ]
        let compression = maybeCompression.compactMapValues { $0 }

        var settings: [String: any Sendable] = [
            AVVideoCodecKey: codec.stringValue,
            AVVideoWidthKey: NSNumber(value: Int(size.width)),
            AVVideoHeightKey: NSNumber(value: Int(size.height)),
        ]
        if !compression.isEmpty {
            settings[AVVideoCompressionPropertiesKey] = compression
        }
        if let color {
            settings[AVVideoColorPropertiesKey] = color.properties
        }
        return settings
    }
}
extension AVMutableVideoComposition {
    /// Applies the render size, frame rate, and color space from `settings` to
    /// this composition in place, returning `self` for chaining.
    ///
    /// - Parameter settings: The output settings to mirror onto the composition.
    /// - Returns: The same composition instance, mutated.
    func applyingSettings(_ settings: VideoOutputSettings) -> AVMutableVideoComposition {
        renderSize = settings.size
        if let fps = settings.fps {
            // A zero or negative rate is a caller logic error and would produce
            // an invalid frame duration either way.
            precondition(fps > 0, "fps must be positive")
            // Take frame timing from the composition itself, not a source track.
            sourceTrackIDForFrameTiming = kCMPersistentTrackID_Invalid
            // Represent 1/fps exactly rather than rounding through a fixed 600
            // timescale, which loses precision for rates that don't divide 600
            // (e.g. 7 fps would round to 86/600 ≈ 6.98 fps).
            frameDuration = CMTime(value: 1, timescale: CMTimeScale(fps))
        }
        switch settings.color {
        case nil:
            break // Preserve the source color space.
        case .sdr:
            colorPrimaries = AVVideoColorPrimaries_ITU_R_709_2
            colorTransferFunction = AVVideoTransferFunction_ITU_R_709_2
            colorYCbCrMatrix = AVVideoYCbCrMatrix_ITU_R_709_2
        case .hdr:
            colorPrimaries = AVVideoColorPrimaries_ITU_R_2020
            colorTransferFunction = AVVideoTransferFunction_ITU_R_2100_HLG
            colorYCbCrMatrix = AVVideoYCbCrMatrix_ITU_R_2020
        }
        return self
    }
}

View file

@ -10,23 +10,6 @@ import AVFoundation
import Testing
final class ExportSessionTests {
private let defaultAudioSettings: [String: any Sendable] = [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey: NSNumber(value: 2),
AVSampleRateKey: NSNumber(value: 44_100.0),
]
private func defaultVideoSettings(size: CGSize, bitrate: Int? = nil) -> [String: any Sendable] {
let compressionProperties: [String: any Sendable] =
if let bitrate { [AVVideoAverageBitRateKey: NSNumber(value: bitrate)] } else { [:] }
return [
AVVideoCodecKey: AVVideoCodecType.h264.rawValue,
AVVideoWidthKey: NSNumber(value: Int(size.width)),
AVVideoHeightKey: NSNumber(value: Int(size.height)),
AVVideoCompressionPropertiesKey: compressionProperties,
]
}
private func resourceURL(named name: String, withExtension ext: String) -> URL {
Bundle(for: Self.self).url(forResource: name, withExtension: ext)!
}
@ -37,15 +20,10 @@ final class ExportSessionTests {
])
}
private func makeFilename(function: String = #function) -> String {
private func makeTemporaryURL(function: String = #function) -> AutoDestructingURL {
let timestamp = Int(Date.now.timeIntervalSince1970)
let f = function.replacing(/[\(\)]/, with: { _ in "" })
let filename = "\(Self.self)_\(f)_\(timestamp).mp4"
return filename
}
private func makeTemporaryURL(function: String = #function) -> AutoDestructingURL {
let filename = makeFilename(function: function)
let url = URL.temporaryDirectory.appending(component: filename)
return AutoDestructingURL(url: url)
}
@ -53,8 +31,7 @@ final class ExportSessionTests {
private func makeVideoComposition(
assetURL: URL,
size: CGSize? = nil,
fps: Int? = nil,
removeHDR: Bool = false
fps: Int? = nil
) async throws -> sending AVMutableVideoComposition {
let asset = makeAsset(url: assetURL)
let videoComposition = try await AVMutableVideoComposition.videoComposition(
@ -68,41 +45,75 @@ final class ExportSessionTests {
videoComposition.sourceTrackIDForFrameTiming = kCMPersistentTrackID_Invalid
videoComposition.frameDuration = CMTime(seconds: seconds, preferredTimescale: 600)
}
if removeHDR {
videoComposition.colorPrimaries = AVVideoColorPrimaries_ITU_R_709_2
videoComposition.colorTransferFunction = AVVideoTransferFunction_ITU_R_709_2
videoComposition.colorYCbCrMatrix = AVVideoYCbCrMatrix_ITU_R_709_2
}
return videoComposition
}
@Test func test_export_720p_h264_24fps() async throws {
@Test func test_sugary_export_720p_h264_24fps() async throws {
let sourceURL = resourceURL(named: "test-4k-hdr-hevc-30fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let size = CGSize(width: 1280, height: 720)
let duration = CMTime(seconds: 1, preferredTimescale: 600)
let videoComposition = try await makeVideoComposition(
assetURL: sourceURL,
size: size,
fps: 24,
removeHDR: true
)
let destinationURL = makeTemporaryURL()
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: size, bitrate: 1_000_000),
timeRange: CMTimeRange(start: .zero, duration: duration),
asset: makeAsset(url: sourceURL),
timeRange: CMTimeRange(start: .zero, duration: .seconds(1)),
video: .codec(.h264, width: 1280, height: 720)
.fps(24)
.bitrate(1_000_000)
.color(.sdr),
to: destinationURL.url,
as: .mp4
)
let exportedAsset = AVURLAsset(url: destinationURL.url)
#expect(try await exportedAsset.load(.duration) == duration)
#expect(try await exportedAsset.load(.duration) == .seconds(1))
// Audio
try #require(try await exportedAsset.sendTracks(withMediaType: .audio).count == 1)
let audioTrack = try #require(await exportedAsset.sendTracks(withMediaType: .audio).first)
let audioFormat = try #require(await audioTrack.load(.formatDescriptions).first)
#expect(audioFormat.mediaType == .audio)
#expect(audioFormat.mediaSubType == .mpeg4AAC)
#expect(audioFormat.audioChannelLayout?.numberOfChannels == 2)
#expect(audioFormat.audioStreamBasicDescription?.mSampleRate == 44_100)
// Video
try #require(await exportedAsset.sendTracks(withMediaType: .video).count == 1)
let videoTrack = try #require(await exportedAsset.sendTracks(withMediaType: .video).first)
#expect(try await videoTrack.load(.naturalSize) == CGSize(width: 1280, height: 720))
#expect(try await videoTrack.load(.nominalFrameRate) == 24.0)
#expect(try await videoTrack.load(.estimatedDataRate) == 1_036_128)
let videoFormat = try #require(await videoTrack.load(.formatDescriptions).first)
#expect(videoFormat.mediaType == .video)
#expect(videoFormat.mediaSubType == .h264)
#expect(videoFormat.extensions[.colorPrimaries] == .colorPrimaries(.itu_R_709_2))
#expect(videoFormat.extensions[.transferFunction] == .transferFunction(.itu_R_709_2))
#expect(videoFormat.extensions[.yCbCrMatrix] == .yCbCrMatrix(.itu_R_709_2))
}
@Test func test_export_720p_h264_24fps() async throws {
let sourceURL = resourceURL(named: "test-4k-hdr-hevc-30fps", withExtension: "mov")
let videoComposition = try await makeVideoComposition(
assetURL: sourceURL,
size: CGSize(width: 1280, height: 720),
fps: 24
)
let destinationURL = makeTemporaryURL()
let subject = ExportSession()
try await subject.export(
asset: makeAsset(url: sourceURL),
timeRange: CMTimeRange(start: .zero, duration: .seconds(1)),
audioOutputSettings: AudioOutputSettings.default.settingsDictionary,
videoOutputSettings: VideoOutputSettings.codec(.h264, width: 1280, height: 720)
.fps(24)
.bitrate(1_000_000)
.color(.sdr)
.settingsDictionary,
composition: videoComposition,
to: destinationURL.url,
as: .mp4
)
let exportedAsset = AVURLAsset(url: destinationURL.url)
#expect(try await exportedAsset.load(.duration) == .seconds(1))
// Audio
try #require(try await exportedAsset.sendTracks(withMediaType: .audio).count == 1)
let audioTrack = try #require(await exportedAsset.sendTracks(withMediaType: .audio).first)
@ -127,31 +138,59 @@ final class ExportSessionTests {
@Test func test_export_default_timerange() async throws {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let originalDuration = try await sourceAsset.load(.duration)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let destinationURL = makeTemporaryURL()
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: videoComposition.renderSize),
asset: makeAsset(url: sourceURL),
video: .codec(.h264, size: CGSize(width: 1280, height: 720)),
to: destinationURL.url,
as: .mov
)
let exportedAsset = AVURLAsset(url: destinationURL.url)
#expect(try await exportedAsset.load(.duration) == originalDuration)
#expect(try await exportedAsset.load(.duration) == .seconds(1))
}
@Test func test_export_default_composition_with_size() async throws {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let size = CGSize(width: 640, height: 360)
let destinationURL = makeTemporaryURL()
let subject = ExportSession()
try await subject.export(
asset: makeAsset(url: sourceURL),
audioOutputSettings: AudioOutputSettings.default.settingsDictionary,
videoOutputSettings: VideoOutputSettings.codec(.h264, size: size).settingsDictionary,
to: destinationURL.url,
as: .mov
)
let exportedAsset = AVURLAsset(url: destinationURL.url)
let videoTrack = try #require(try await exportedAsset.loadTracks(withMediaType: .video).first)
#expect(try await videoTrack.load(.naturalSize) == size)
}
@Test func test_export_default_composition_without_size() async throws {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let destinationURL = makeTemporaryURL()
let subject = ExportSession()
try await subject.export(
asset: makeAsset(url: sourceURL),
audioOutputSettings: AudioOutputSettings.default.settingsDictionary,
videoOutputSettings: [AVVideoCodecKey: AVVideoCodecType.h264.rawValue],
to: destinationURL.url,
as: .mov
)
let exportedAsset = AVURLAsset(url: destinationURL.url)
let exportedTrack = try #require(try await exportedAsset.loadTracks(withMediaType: .video).first)
#expect(try await exportedTrack.load(.naturalSize) == CGSize(width: 1280, height: 720))
}
@Test func test_export_progress() async throws {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let size = videoComposition.renderSize
let progressValues = SendableWrapper<[Float]>([])
let subject = ExportSession()
@ -161,11 +200,8 @@ final class ExportSessionTests {
}
}
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: size),
asset: makeAsset(url: sourceURL),
video: .codec(.h264, width: 1280, height: 720),
to: makeTemporaryURL().url,
as: .mov
)
@ -177,16 +213,15 @@ final class ExportSessionTests {
@Test func test_export_works_with_no_audio() async throws {
let sourceURL = resourceURL(named: "test-no-audio", withExtension: "mp4")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: [:],
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: videoComposition.renderSize),
asset: makeAsset(url: sourceURL),
audioOutputSettings: [:], // Ensure that empty audio settings don't matter w/ no track
videoOutputSettings: VideoOutputSettings
.codec(.h264, size: videoComposition.renderSize).settingsDictionary,
composition: videoComposition,
to: makeTemporaryURL().url,
as: .mov
)
@ -195,16 +230,15 @@ final class ExportSessionTests {
@Test func test_export_throws_with_empty_audio_settings() async throws {
try await #require(throws: ExportSession.Error.setupFailure(.audioSettingsEmpty)) {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: [:],
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: videoComposition.renderSize),
asset: makeAsset(url: sourceURL),
audioOutputSettings: [:], // Here it matters because there's an audio track
videoOutputSettings: VideoOutputSettings
.codec(.h264, size: videoComposition.renderSize).settingsDictionary,
composition: videoComposition,
to: makeTemporaryURL().url,
as: .mov
)
@ -214,38 +248,16 @@ final class ExportSessionTests {
@Test func test_export_throws_with_invalid_audio_settings() async throws {
try await #require(throws: ExportSession.Error.setupFailure(.audioSettingsInvalid)) {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
asset: makeAsset(url: sourceURL),
audioOutputSettings: [
AVFormatIDKey: kAudioFormatMPEG4AAC,
AVNumberOfChannelsKey: NSNumber(value: -1), // invalid number of channels
],
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: videoComposition.renderSize),
to: makeTemporaryURL().url,
as: .mov
)
}
}
@Test func test_export_throws_with_empty_video_settings() async throws {
try await #require(throws: ExportSession.Error.setupFailure(.videoSettingsEmpty)) {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
videoOutputSettings: [:],
videoOutputSettings: VideoOutputSettings
.codec(.h264, size: CGSize(width: 1280, height: 720)).settingsDictionary,
to: makeTemporaryURL().url,
as: .mov
)
@ -255,41 +267,31 @@ final class ExportSessionTests {
@Test func test_export_throws_with_invalid_video_settings() async throws {
try await #require(throws: ExportSession.Error.setupFailure(.videoSettingsInvalid)) {
let sourceURL = resourceURL(named: "test-720p-h264-24fps", withExtension: "mov")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let size = videoComposition.renderSize
let size = CGSize(width: 1280, height: 720)
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
asset: makeAsset(url: sourceURL),
audioOutputSettings: AudioOutputSettings.default.settingsDictionary,
videoOutputSettings: [
AVVideoCodecKey: AVVideoCodecType.h264.rawValue,
// missing video width
// missing codec
AVVideoWidthKey: NSNumber(value: Int(size.width)),
AVVideoHeightKey: NSNumber(value: Int(size.height)),
],
composition: nil,
to: makeTemporaryURL().url,
as: .mov
)
}
}
@Test func test_export_throws_with_no_video() async throws {
@Test func test_export_throws_with_no_video_track() async throws {
try await #require(throws: ExportSession.Error.setupFailure(.videoTracksEmpty)) {
let sourceURL = resourceURL(named: "test-no-video", withExtension: "m4a")
let sourceAsset = makeAsset(url: sourceURL)
let videoComposition = try await makeVideoComposition(assetURL: sourceURL)
let size = videoComposition.renderSize
let subject = ExportSession()
try await subject.export(
asset: sourceAsset,
audioMix: nil,
audioOutputSettings: defaultAudioSettings,
videoComposition: videoComposition,
videoOutputSettings: defaultVideoSettings(size: size),
asset: makeAsset(url: sourceURL),
video: .codec(.h264, width: 1280, height: 720),
to: makeTemporaryURL().url,
as: .mov
)