diff --git a/Example/OSAT-VideoCompositor.xcodeproj/project.pbxproj b/Example/OSAT-VideoCompositor.xcodeproj/project.pbxproj
index 61d1899..e860399 100644
--- a/Example/OSAT-VideoCompositor.xcodeproj/project.pbxproj
+++ b/Example/OSAT-VideoCompositor.xcodeproj/project.pbxproj
@@ -13,6 +13,8 @@
 		607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDC1AFB9204008FA782 /* Images.xcassets */; };
 		607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */; };
 		607FACEC1AFB9204008FA782 /* ViewControllerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACEB1AFB9204008FA782 /* ViewControllerTests.swift */; };
+		840E1E2C2A0280FE000CC15E /* portrait.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 840E1E2A2A0280FE000CC15E /* portrait.MOV */; };
+		840E1E2D2A0280FE000CC15E /* landscape.MOV in Resources */ = {isa = PBXBuildFile; fileRef = 840E1E2B2A0280FE000CC15E /* landscape.MOV */; };
 		93A7E1A5FA0310303146425B /* Pods_OSAT_VideoCompositor_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 98C18AA0D7D9B8CAE531EADA /* Pods_OSAT_VideoCompositor_Tests.framework */; };
 		E5A5504C9043983681BB2997 /* Pods_OSAT_VideoCompositor_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B0896B476C4F194D82A4048C /* Pods_OSAT_VideoCompositor_Example.framework */; };
 		F96A6FA9294DC796005B0365 /* videoplayback.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = F96A6FA8294DC796005B0365 /* videoplayback.mp4 */; };
@@ -46,6 +48,8 @@
 		607FACEB1AFB9204008FA782 /* ViewControllerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewControllerTests.swift; sourceTree = "<group>"; };
 		69F6BD00C34281A34B25B004 /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
 		7BB165D49711770CC07EE07F /* Pods-OSAT-VideoCompositor_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OSAT-VideoCompositor_Example.release.xcconfig"; path = "Target Support Files/Pods-OSAT-VideoCompositor_Example/Pods-OSAT-VideoCompositor_Example.release.xcconfig"; sourceTree = "<group>"; };
+		840E1E2A2A0280FE000CC15E /* portrait.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = portrait.MOV; sourceTree = "<group>"; };
+		840E1E2B2A0280FE000CC15E /* landscape.MOV */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = landscape.MOV; sourceTree = "<group>"; };
 		87D796BFA4EF030BF62B4C29 /* Pods-OSAT-VideoCompositor_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OSAT-VideoCompositor_Tests.release.xcconfig"; path = "Target Support Files/Pods-OSAT-VideoCompositor_Tests/Pods-OSAT-VideoCompositor_Tests.release.xcconfig"; sourceTree = "<group>"; };
 		98C18AA0D7D9B8CAE531EADA /* Pods_OSAT_VideoCompositor_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_OSAT_VideoCompositor_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 		AB46A411940C8C9F5CA2983A /* Pods-OSAT-VideoCompositor_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OSAT-VideoCompositor_Example.debug.xcconfig"; path = "Target Support Files/Pods-OSAT-VideoCompositor_Example/Pods-OSAT-VideoCompositor_Example.debug.xcconfig"; sourceTree = "<group>"; };
@@ -108,6 +112,8 @@
 				F9DCDD202976D4A8002F57A0 /* AVPlayerLayer.swift */,
 				607FACD71AFB9204008FA782 /* ViewController.swift */,
 				F96A6FA8294DC796005B0365 /* videoplayback.mp4 */,
+				840E1E2B2A0280FE000CC15E /* landscape.MOV */,
+				840E1E2A2A0280FE000CC15E /* portrait.MOV */,
 				607FACD91AFB9204008FA782 /* Main.storyboard */,
 				607FACDC1AFB9204008FA782 /* Images.xcassets */,
 				607FACDE1AFB9204008FA782 /* LaunchScreen.xib */,
@@ -274,6 +280,8 @@
 				607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */,
 				607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */,
 				F96A6FA9294DC796005B0365 /* videoplayback.mp4 in Resources */,
+				840E1E2D2A0280FE000CC15E /* landscape.MOV in Resources */,
+				840E1E2C2A0280FE000CC15E /* portrait.MOV in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
diff --git a/Example/OSAT-VideoCompositor/ViewController.swift b/Example/OSAT-VideoCompositor/ViewController.swift
index b908a85..332e519 100644
--- a/Example/OSAT-VideoCompositor/ViewController.swift
+++ b/Example/OSAT-VideoCompositor/ViewController.swift
@@ -12,8 +12,8 @@ import UIKit
 class ViewController: UIViewController {
     private struct Constants {
-        static let playButton = "play"
-        static let pauseButton = "pause"
+        static let playButton = "play.circle.fill"
+        static let pauseButton = "pause.circle.fill"
         static let iconSize: CGFloat = 40
     }
@@ -84,7 +84,7 @@ class ViewController: UIViewController {
         let url = Bundle.main.url(forResource: "videoplayback", withExtension: "mp4")!
         videoPlayerLayer = AVPlayerView(frame: .zero)
         originalVideoUrl = url
-        view.backgroundColor = .black
+        view.backgroundColor = .systemBackground
         videoPlayerLayer?.set(url: url)
         videoPlayerLayer?.delegate = self
@@ -92,14 +92,15 @@ class ViewController: UIViewController {
         videoPlayerLayer.translatesAutoresizingMaskIntoConstraints = false
         navigationItem.title = "OSAT Video Compositer"
-        navigationItem.rightBarButtonItem = UIBarButtonItem(barButtonSystemItem: .add, target: self, action: nil)
+
+        navigationItem.rightBarButtonItem = UIBarButtonItem(image: UIImage(systemName: "ellipsis.circle"), style: .plain, target: self, action: nil)
         navigationItem.rightBarButtonItem?.menu = createVideoImageMenu()
         navigationItem.leftBarButtonItem = UIBarButtonItem(barButtonSystemItem: .compose, target: self, action: nil)
         navigationItem.leftBarButtonItem?.menu = createWaterMarkMenu()
+        navigationController?.navigationBar.barStyle = .default
-        videoPlayerLayer.backgroundColor = .systemGray
-        videoPlayerLayer.play()
+        videoPlayerLayer.backgroundColor = .systemGroupedBackground
         addSubviews()
         setButtonProperties()
         setupConstraints()
@@ -125,10 +126,9 @@ class ViewController: UIViewController {
             playerView.topAnchor.constraint(equalTo: view.safeAreaLayoutGuide.topAnchor),
             playerView.leadingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.leadingAnchor),
             playerView.trailingAnchor.constraint(equalTo: view.safeAreaLayoutGuide.trailingAnchor),
-            playerView.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -300),
             videoPlayerLayer.centerXAnchor.constraint(equalTo: playerView.centerXAnchor),
-            videoPlayerLayer.centerXAnchor.constraint(equalTo: playerView.centerXAnchor),
+            videoPlayerLayer.centerYAnchor.constraint(equalTo: playerView.centerYAnchor),
             videoPlayerLayer.leadingAnchor.constraint(equalTo: playerView.leadingAnchor),
             videoPlayerLayer.trailingAnchor.constraint(equalTo: playerView.trailingAnchor),
            videoPlayerLayer.heightAnchor.constraint(equalTo: videoPlayerLayer.widthAnchor, multiplier: 1),
@@ -137,17 +137,20 @@ class ViewController: UIViewController {
             spinner.centerXAnchor.constraint(equalTo: playerView.centerXAnchor),
             sliderParentView.topAnchor.constraint(equalTo: playerView.bottomAnchor, constant: 10),
-            sliderParentView.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -60),
-            sliderParentView.leadingAnchor.constraint(equalTo: videoPlayerLayer.leadingAnchor),
-            sliderParentView.trailingAnchor.constraint(equalTo: videoPlayerLayer.trailingAnchor),
+            sliderParentView.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: 0),
+            sliderParentView.leadingAnchor.constraint(equalTo: view.leadingAnchor),
+            sliderParentView.trailingAnchor.constraint(equalTo: view.trailingAnchor),
+            sliderParentView.heightAnchor.constraint(equalToConstant: 100.0),
-            slider.leadingAnchor.constraint(equalTo: sliderParentView.leadingAnchor, constant: 10),
-            slider.trailingAnchor.constraint(equalTo: sliderParentView.trailingAnchor, constant: -10),
-            slider.heightAnchor.constraint(equalTo: sliderParentView.heightAnchor),
-
-            playbutton.topAnchor.constraint(equalTo: sliderParentView.topAnchor, constant: 20),
+            playbutton.centerYAnchor.constraint(equalTo: sliderParentView.centerYAnchor),
+            playbutton.leadingAnchor.constraint(equalTo: sliderParentView.safeAreaLayoutGuide.leadingAnchor),
             playbutton.heightAnchor.constraint(equalToConstant: Constants.iconSize),
             playbutton.widthAnchor.constraint(equalToConstant: Constants.iconSize),
+
+            slider.centerYAnchor.constraint(equalTo: sliderParentView.centerYAnchor),
+            slider.leadingAnchor.constraint(equalTo: playbutton.safeAreaLayoutGuide.trailingAnchor, constant: 10),
+            slider.trailingAnchor.constraint(equalTo: sliderParentView.safeAreaLayoutGuide.trailingAnchor, constant: -10)
+
         ])
     }
@@ -198,6 +201,10 @@ class ViewController: UIViewController {
             self.showImagePicker()
         }
+        let multiVideo = UIAction(title: "Merge & Trim", image: nil, identifier: UIAction.Identifier("leftBtm1"), attributes: [], state: .off) { action in
+            self.mergeTrimVideoExample()
+        }
+
         let selectImage = UIAction(title: "Select an Image", image: UIImage(systemName: "photo"), attributes: [], state: .off) { action in
             self.showImagePickerForWaterMark()
         }
@@ -218,14 +225,19 @@ class ViewController: UIViewController {
             self.handleExportButtonAction()
         }
-        let deferredMenu = UIDeferredMenuElement { (menuElements) in
+        let deferredMenu2 = UIDeferredMenuElement { (menuElements) in
             let menu = UIMenu(title: "Image/Font Color", options: .displayInline, children: [addTextItem, selectImage, pickFontColor, addOnlyImageItem, setExportUrlItem])
             menuElements([menu])
         }
+        let deferredMenu1 = UIDeferredMenuElement { (menuElements) in
+            let menu = UIMenu(title: "Feature Example", options: .displayInline, children: [multiVideo])
+            menuElements([menu])
+        }
+
         let elements: [UIAction] = [selectVideo]
         var menu = UIMenu(title: "Select Video", children: elements)
-        menu = menu.replacingChildren([selectVideo, deferredMenu])
+        menu = menu.replacingChildren([selectVideo, deferredMenu1, deferredMenu2])
         return menu
     }
@@ -264,8 +276,8 @@ class ViewController: UIViewController {
     }
     private func setButtonProperties() {
-        playbutton.setImage(UIImage(systemName: Constants.playButton, withConfiguration: UIImage.SymbolConfiguration(pointSize: Constants.iconSize)), for: .selected)
         playbutton.setImage(UIImage(systemName: Constants.pauseButton, withConfiguration: UIImage.SymbolConfiguration(pointSize: Constants.iconSize)), for: .normal)
+        playbutton.setImage(UIImage(systemName: Constants.playButton, withConfiguration: UIImage.SymbolConfiguration(pointSize: Constants.iconSize)), for: .selected)
     }
     @objc private func handlePlayButtonAction(_ sender: Any) {
@@ -365,6 +377,54 @@ class ViewController: UIViewController {
         self.present(alertController, animated: true, completion: nil)
     }
+    private func mergeTrimVideoExample() {
+        guard let portraitURL = Bundle.main.url(forResource: "portrait", withExtension: "MOV"),
+              let landscapeURL = Bundle.main.url(forResource: "landscape", withExtension: "MOV")
+        else { return }
+
+        guard let exportUrl = exportUrl else {
+            showExportUrlNotPresent()
+            return
+        }
+
+        videoPlayerLayer.isHidden = true
+        videoPlayerLayer.pause()
+
+        spinner.isHidden = false
+        spinner.startAnimating()
+
+        let portraitAsset = OSATVideoSource(videoURL: portraitURL, startTime: 2, duration: 5)
+        let landscapeAsset = OSATVideoSource(videoURL: landscapeURL, startTime: 2, duration: 5)
+
+        DispatchQueue.global().async {
+            let compositor = OSATVideoComposition()
+            compositor.makeMultiVideoComposition(from: [portraitAsset, landscapeAsset], exportURL: exportUrl) { [weak self] session in
+                guard let self = self else { return }
+                switch session.status {
+                case .completed:
+                    guard let sessionOutputUrl = session.outputURL, NSData(contentsOf: sessionOutputUrl) != nil else { return }
+                    DispatchQueue.main.async {
+                        self.videoPlayerLayer.set(url: sessionOutputUrl)
+                        self.play()
+                        self.videoPlayerLayer.isHidden = false
+                        self.spinner.isHidden = true
+                        self.spinner.stopAnimating()
+                        Task {
+                            await self.getDuration()
+                        }
+                    }
+
+                case .failed:
+                    NSLog("error: \(String(describing: session.error))", "")
+
+                default: break
+                }
+            } errorHandler: { error in
+                NSLog("\(error)", "")
+            }
+        }
+    }
+
     private func addWaterMark() {
         guard let inputURL = originalVideoUrl else { return }
diff --git a/Example/OSAT-VideoCompositor/landscape.MOV b/Example/OSAT-VideoCompositor/landscape.MOV
new file mode 100644
index 0000000..f5ad437
Binary files /dev/null and b/Example/OSAT-VideoCompositor/landscape.MOV differ
diff --git a/Example/OSAT-VideoCompositor/portrait.MOV b/Example/OSAT-VideoCompositor/portrait.MOV
new file mode 100644
index 0000000..e03ac28
Binary files /dev/null and b/Example/OSAT-VideoCompositor/portrait.MOV differ
diff --git a/OSAT-VideoCompositor/Classes/OSATExtension.swift b/OSAT-VideoCompositor/Classes/OSATExtension.swift
new file mode 100644
index 0000000..1cf79f3
--- /dev/null
+++ b/OSAT-VideoCompositor/Classes/OSATExtension.swift
@@ -0,0 +1,77 @@
+//
+//  OSATExtension.swift
+//  OSAT-VideoCompositor
+//
+//  Created by Urmit Chauhan on 03/05/23.
+//
+
+import AVFoundation
+import UIKit // UIImage.Orientation is referenced below
+
+extension Double {
+    func toCMTime() -> CMTime {
+        return CMTime(seconds: self, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+    }
+}
+
+extension CGAffineTransform {
+    var orientation: (orientation: UIImage.Orientation, isPortrait: Bool) {
+        var assetOrientation = UIImage.Orientation.up
+        var isPortrait = false
+        switch [a, b, c, d] {
+        case [0.0, 1.0, -1.0, 0.0]:
+            assetOrientation = .right
+            isPortrait = true
+
+        case [0.0, -1.0, 1.0, 0.0]:
+            assetOrientation = .left
+            isPortrait = true
+
+        case [1.0, 0.0, 0.0, 1.0]:
+            assetOrientation = .up
+
+        case [-1.0, 0.0, 0.0, -1.0]:
+            assetOrientation = .down
+
+        default:
+            break
+        }
+
+        return (assetOrientation, isPortrait)
+    }
+}
+
+extension AVAssetTrack {
+    var fixedPreferredTransform: CGAffineTransform {
+        var newT = preferredTransform
+        switch [newT.a, newT.b, newT.c, newT.d] {
+        case [1, 0, 0, 1]:
+            newT.tx = 0
+            newT.ty = 0
+        case [1, 0, 0, -1]:
+            newT.tx = 0
+            newT.ty = naturalSize.height
+        case [-1, 0, 0, 1]:
+            newT.tx = naturalSize.width
+            newT.ty = 0
+        case [-1, 0, 0, -1]:
+            newT.tx = naturalSize.width
+            newT.ty = naturalSize.height
+        case [0, -1, 1, 0]:
+            newT.tx = 0
+            newT.ty = naturalSize.width
+        case [0, 1, -1, 0]:
+            newT.tx = naturalSize.height
+            newT.ty = 0
+        case [0, 1, 1, 0]:
+            newT.tx = 0
+            newT.ty = 0
+        case [0, -1, -1, 0]:
+            newT.tx = naturalSize.height
+            newT.ty = naturalSize.width
+        default:
+            break
+        }
+        return newT
+    }
+}
diff --git a/OSAT-VideoCompositor/Classes/OSATVideoComposition.swift b/OSAT-VideoCompositor/Classes/OSATVideoComposition.swift
index a552437..4390857 100644
--- a/OSAT-VideoCompositor/Classes/OSATVideoComposition.swift
+++ b/OSAT-VideoCompositor/Classes/OSATVideoComposition.swift
@@ -6,10 +6,21 @@
 //
 import AVFoundation
+public struct OSATVideoSource {
+    public let videoURL: URL
+    public let startTime: Double
+    public let duration: Double
+    public init(videoURL: URL, startTime: Double, duration: Double) {
+        self.videoURL = videoURL
+        self.startTime = startTime
+        self.duration = duration
+    }
+}
 public struct OSATVideoComposition {
     public init() {}
+
     /// Creates a video composition for a source video with annotations
     /// - Parameters:
     ///   - sourceVideoURL: URL for the source video
@@ -43,7 +54,7 @@
         compositionTrack.preferredTransform = assetTrack.preferredTransform
-        let videoInfo = orientation(from: assetTrack.preferredTransform)
+        let videoInfo = assetTrack.preferredTransform.orientation
         let videoSize: CGSize = videoInfo.isPortrait ? CGSize(width: assetTrack.naturalSize.height, height: assetTrack.naturalSize.width) : assetTrack.naturalSize
         let videoLayer = CALayer()
@@ -73,6 +84,103 @@
         let layerInstruction = compositionLayerInstruction(for: compositionTrack, assetTrack: assetTrack)
         instruction.layerInstructions = [layerInstruction]
+        export(composition: composition, videoComposition: videoComposition, exportURL: exportURL, completionHandler: completionHandler, errorHandler: errorHandler)
+    }
+
+    /// Makes a single video from multiple source videos, merging the clips in order and trimming each to its start time and duration.
+    /// - Parameters:
+    ///   - sourceItems: source videos to merge, each trimmed to its `startTime` and `duration`
+    ///   - animation: pass `true` to fade each clip out at its end, `false` for a hard cut
+    ///   - exportURL: URL for saving the exported video
+    ///   - completionHandler: called when the video composition is exported successfully
+    ///   - errorHandler: called when the video composition fails for any reason
+    public func makeMultiVideoComposition(from sourceItems: [OSATVideoSource], animation: Bool = true, exportURL: URL, completionHandler: @escaping (_ videoExportSession: AVAssetExportSession) -> Void, errorHandler: @escaping (_ error: OSATVideoCompositionError) -> Void) {
+        var insertTime = CMTime.zero
+        // Currently only a single canvas size is supported
+        let defaultSize = CGSize(width: 1280, height: 1280) // Default video size
+        var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
+
+        // Init composition
+        let mixComposition = AVMutableComposition()
+
+        for videoSource in sourceItems {
+            let videoAsset = AVAsset(url: videoSource.videoURL)
+            // Get video track
+            guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }
+
+            // Get audio track
+            var audioTrack: AVAssetTrack?
+            if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
+                audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first
+            }
+
+            // Init video & audio composition track
+            let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
+                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
+
+            let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
+                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
+
+            do {
+                let startTime = videoSource.startTime.toCMTime() // CMTime.zero
+                let duration = videoSource.duration.toCMTime() // videoAsset.duration
+
+                // Add video track to video composition at specific time
+                try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration),
+                                                           of: videoTrack,
+                                                           at: insertTime)
+
+                // Add audio track to audio composition at specific time
+                if let audioTrack = audioTrack {
+                    try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: startTime, duration: duration),
+                                                               of: audioTrack,
+                                                               at: insertTime)
+                }
+
+                // Add layer instruction for video track
+                if let videoCompositionTrack = videoCompositionTrack {
+                    let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: videoAsset, targetSize: defaultSize)
+
+                    // Hide video track before changing to new track
+                    let endTime = CMTimeAdd(insertTime, duration)
+
+                    if animation {
+                        let durationAnimation = 1.0.toCMTime()
+
+                        layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange(start: endTime, duration: durationAnimation))
+                    }
+                    else {
+                        layerInstruction.setOpacity(0, at: endTime)
+                    }
+
+                    arrayLayerInstructions.append(layerInstruction)
+                }
+
+                // Increase the insert time
+                insertTime = CMTimeAdd(insertTime, duration)
+            }
+            catch {
+                print("Load track error: \(error)")
+            }
+        }
+
+        // Main video composition instruction
+        let mainInstruction = AVMutableVideoCompositionInstruction()
+        mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime)
+        mainInstruction.layerInstructions = arrayLayerInstructions
+
+        // Main video composition
+        let mainVideoComposition = AVMutableVideoComposition()
+        mainVideoComposition.instructions = [mainInstruction]
+        mainVideoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
+        mainVideoComposition.renderSize = defaultSize
+
+        // do export
+        export(composition: mixComposition, videoComposition: mainVideoComposition, exportURL: exportURL, completionHandler: completionHandler, errorHandler: errorHandler)
+    }
+
+    private func export(composition: AVMutableComposition, videoComposition: AVMutableVideoComposition, exportURL: URL, completionHandler: @escaping (_ videoExportSession: AVAssetExportSession) -> Void, errorHandler: @escaping (_ error: OSATVideoCompositionError) -> Void) {
+
         guard let export = AVAssetExportSession(
             asset: composition,
             presetName: AVAssetExportPresetHighestQuality)
@@ -83,49 +191,71 @@ public struct OSATVideoComposition {
         }
         let videoName = UUID().uuidString
-        let exportURL = exportURL.appendingPathComponent(videoName).appendingPathExtension("mov")
+        let exportURL = exportURL.appendingPathComponent(videoName).appendingPathExtension("mp4")
         export.videoComposition = videoComposition
         export.outputFileType = .mov
         export.outputURL = exportURL
-        export.exportAsynchronously {
+        export.exportAsynchronously(completionHandler: {
             DispatchQueue.main.async {
                 switch export.status {
                 case .completed:
                     completionHandler(export)
                 default:
+                    print(export.error ?? "")
                     errorHandler(.assetExportSessionFailed)
                     break
                 }
             }
+        })
+    }
+
+    private func videoCompositionInstructionForTrack(track: AVCompositionTrack?, asset: AVAsset, targetSize: CGSize) -> AVMutableVideoCompositionLayerInstruction {
+        guard let track = track else {
+            return AVMutableVideoCompositionLayerInstruction()
+        }
+
+        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
+        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
+
+        let transform = assetTrack.fixedPreferredTransform
+        let assetOrientation = transform.orientation
+
+        let scaleToFitRatio = min(targetSize.width / assetTrack.naturalSize.width, targetSize.width / assetTrack.naturalSize.height)
+        if assetOrientation.isPortrait {
+            // Scale to fit target size
+            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
+
+            // Align center X and Y
+            let newX = targetSize.width/2 - ((assetTrack.naturalSize.height / 2) * scaleToFitRatio)
+            let newY = targetSize.height/2 - ((assetTrack.naturalSize.width / 2) * scaleToFitRatio)
+            let moveCenterFactor = CGAffineTransform(translationX: newX, y: newY)
+
+            let finalTransform = transform.concatenating(scaleFactor).concatenating(moveCenterFactor)
+
+            instruction.setTransform(finalTransform, at: .zero)
+        } else {
+            // Scale to fit target size
+            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
+
+            // Align center Y
+            let newY = targetSize.height/2 - (assetTrack.naturalSize.height * scaleToFitRatio)/2
+            let moveCenterFactor = CGAffineTransform(translationX: 0, y: newY)
+
+            let finalTransform = transform.concatenating(scaleFactor).concatenating(moveCenterFactor)
+
+            instruction.setTransform(finalTransform, at: .zero)
         }
+
+        return instruction
     }
+
     private func compositionLayerInstruction(for track: AVCompositionTrack, assetTrack: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
         let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
         let transform = assetTrack.preferredTransform
-
         instruction.setTransform(transform, at: .zero)
-
         return instruction
     }
-
-    private func orientation(from transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
-        var assetOrientation = UIImage.Orientation.up
-        var isPortrait = false
-        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
-            assetOrientation = .right
-            isPortrait = true
-        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
-            assetOrientation = .left
-            isPortrait = true
-        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
-            assetOrientation = .up
-        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
-            assetOrientation = .down
-        }
-
-        return (assetOrientation, isPortrait)
-    }
 }
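Usage sketch (not part of the patch): a minimal sketch of how a client could drive the new merge & trim API added in OSATVideoComposition.swift. The clip URLs, the export directory, the helper function name, and the module name OSAT_VideoCompositor are placeholders/assumptions; the call shape follows mergeTrimVideoExample above, the completion handler is delivered on the main queue by export(), and exportURL is treated as a directory to which a UUID file name is appended.

import AVFoundation
import OSAT_VideoCompositor // module name assumed; adjust to the pod's actual module

func mergeClips(portraitURL: URL, landscapeURL: URL, exportDirectoryURL: URL) {
    // Each OSATVideoSource describes one clip: take 5 seconds starting at t = 2 s.
    let clips = [
        OSATVideoSource(videoURL: portraitURL, startTime: 2, duration: 5),
        OSATVideoSource(videoURL: landscapeURL, startTime: 2, duration: 5)
    ]

    // animation: true keeps the 1-second opacity ramp at each clip boundary.
    OSATVideoComposition().makeMultiVideoComposition(from: clips, animation: true, exportURL: exportDirectoryURL) { session in
        // Called on the main queue; the merged file is at session.outputURL.
        print("Merged video written to \(String(describing: session.outputURL))")
    } errorHandler: { error in
        print("Composition failed: \(error)")
    }
}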
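A second sketch, also not part of the patch, of how the new helpers in OSATExtension.swift fit together. They are internal to the pod, so this only illustrates the flow used inside makeVideoComposition and videoCompositionInstructionForTrack; movieURL and the function name are placeholders.

import AVFoundation
import UIKit

func renderSize(for movieURL: URL) -> CGSize? {
    guard let track = AVAsset(url: movieURL).tracks(withMediaType: .video).first else { return nil }

    // fixedPreferredTransform normalises the translation component of a rotated track,
    // and .orientation reports whether the rotated frame is portrait.
    let transform = track.fixedPreferredTransform
    let info = transform.orientation

    // Portrait tracks swap the natural width and height, as in makeVideoComposition.
    return info.isPortrait
        ? CGSize(width: track.naturalSize.height, height: track.naturalSize.width)
        : track.naturalSize
}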
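Finally, a worked example of the letterboxing math in videoCompositionInstructionForTrack, assuming a 1920x1080 landscape clip and the default 1280x1280 canvas (note both terms of the ratio use targetSize.width):

scaleToFitRatio = min(1280 / 1920, 1280 / 1080) = min(0.667, 1.185) = 0.667
scaled clip size = 1920 * 0.667 by 1080 * 0.667 = 1280 x 720
newY = 1280 / 2 - (1080 * 0.667) / 2 = 640 - 360 = 280

so the clip is drawn full-width and vertically centred, with 280-point bars above and below it on the square canvas.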