I have an AVURLAsset array. It contains videos that were taken with the back camera, videos taken with the front camera (not mirrored), and images turned into a video, all in portrait orientation.
I created a function to return a single merged video for playback.
My problem is that the original presentation of each video isn't preserved in the final playback: a video comes out either rotated, stretched/squished, or mirrored.
I've tried to follow existing SO posts, but nothing has worked.
The function below is where I've landed so far (after it I've included one of the transform variants I tried). Any guidance would be greatly appreciated.
func merge(assets: [AVURLAsset], completion: @escaping (URL?, AVAssetExportSession?) -> Void) {
    let mainComposition = AVMutableComposition()
    var lastTime = CMTime.zero

    guard let videoCompositionTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    guard let audioCompositionTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack)

    // Add media items to the composition
    for asset in assets {
        if let videoTrack = asset.tracks(withMediaType: .video)[safe: 0] { // `safe` is a custom bounds-checked subscript
            // Record the source track's orientation at the point this segment starts
            let t = videoTrack.preferredTransform
            layerInstruction.setTransform(t, at: lastTime)

            if let audioTrack = asset.tracks(withMediaType: .audio)[safe: 0] {
                do {
                    try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), of: videoTrack, at: lastTime)
                    try audioCompositionTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), of: audioTrack, at: lastTime)
                    print("Inserted audio + video track")
                } catch {
                    print("Failed to insert audio or video track")
                    return
                }
                lastTime = CMTimeAdd(lastTime, asset.duration)
            } else {
                do {
                    try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration), of: videoTrack, at: lastTime)
                    print("Inserted just video track")
                } catch {
                    print("Failed to insert just video track")
                    return
                }
                lastTime = CMTimeAdd(lastTime, asset.duration)
            }
        }
    }
    let outputUrl = NSURL.fileURL(withPath: NSTemporaryDirectory() + "test" + ".mp4")

    let videoComposition = AVMutableVideoComposition()
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.layerInstructions = [layerInstruction]
    instruction.timeRange = videoCompositionTrack.timeRange
    videoComposition.instructions = [instruction]
    videoComposition.frameDuration = videoCompositionTrack.minFrameDuration
    videoComposition.renderSize = CGSize(width: 720, height: 1280) // Adjust as per your video dimensions
    guard let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputUrl
    exporter.outputFileType = .mp4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = videoComposition

    exporter.exportAsynchronously {
        if let outputUrl = exporter.outputURL {
            completion(outputUrl, exporter)
        }
    }
    // Note: this plays the in-memory composition right away; it doesn't wait for the export to finish
    play(video: exporter.asset)
}
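
For reference, this is the kind of per-asset transform I tried to adapt from those SO posts, instead of passing preferredTransform straight through: rotate the track upright first, then scale and center it into the render size. The helper name transformFor(track:renderSize:) is my own, not an API, and it assumes the same fixed 720x1280 portrait render size used above.

import AVFoundation
import CoreGraphics

// My own helper (not an AVFoundation API): builds a transform that maps `track`
// into `renderSize`, honoring the track's preferredTransform (rotation/mirroring).
func transformFor(track: AVAssetTrack, renderSize: CGSize) -> CGAffineTransform {
    let preferred = track.preferredTransform

    // Bounding box of the track after its preferred transform is applied,
    // i.e. the size and offset the video is actually displayed at.
    let displayRect = CGRect(origin: .zero, size: track.naturalSize).applying(preferred)

    // Uniform scale so the displayed video fits inside the render size.
    let scale = min(renderSize.width / displayRect.width,
                    renderSize.height / displayRect.height)

    // Move the (possibly negative-origin) display rect to (0, 0),
    // scale it to fit, then center it in the render area.
    return preferred
        .concatenating(CGAffineTransform(translationX: -displayRect.minX, y: -displayRect.minY))
        .concatenating(CGAffineTransform(scaleX: scale, y: scale))
        .concatenating(CGAffineTransform(translationX: (renderSize.width - displayRect.width * scale) / 2,
                                         y: (renderSize.height - displayRect.height * scale) / 2))
}

With that helper, the setTransform line in the loop becomes:

layerInstruction.setTransform(transformFor(track: videoTrack, renderSize: CGSize(width: 720, height: 1280)), at: lastTime)

Even with variants of this I couldn't get every clip (back camera, front camera, image-based) to come out upright and unstretched, which is where I'm stuck.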