ios - Why does the second video play black after merging with AVFoundation?

I'm using 3 videos in one composition. I can't figure out why the second one plays black after merging them together. It has the correct times and values when debugging, so I know it's there. Here is my code:

import UIKit
import AVFoundation
import MobileCoreServices
import CoreMedia
import AssetsLibrary
import MediaPlayer
import Photos

class MergeViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate, MPMediaPickerControllerDelegate {

let clipPicker = UIImagePickerController()
let audioPicker = MPMediaPickerController()

var isSelectingAsset: Int!
// video clip 1
var firstAsset: AVAsset!
// video clip 2
var secondAsset: AVAsset!
// video clip 3
var thirdAsset: AVAsset!
// sound track
var audioAsset: AVAsset!
// activity view indicator


override func viewDidLoad() {
super.viewDidLoad()

clipPicker.delegate = self
clipPicker.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
clipPicker.mediaTypes = [kUTTypeMovie]

audioPicker.delegate = self
audioPicker.prompt = "Select Audio"

}

@IBAction func loadAsset1(sender: AnyObject) {

if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {

isSelectingAsset = 1
self.presentViewController(clipPicker, animated: true, completion: nil)
}

}

@IBAction func loadAsset2(sender: AnyObject) {

if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {

isSelectingAsset = 2
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}

@IBAction func loadAsset3(sender: AnyObject) {

if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {

isSelectingAsset = 3
self.presentViewController(clipPicker, animated: true, completion: nil)
}


}
@IBAction func loadAudio(sender: AnyObject) {

self.presentViewController(audioPicker, animated: true, completion: nil)
}

@IBAction func mergeMedia(sender: AnyObject) {

if firstAsset != nil && secondAsset != nil && thirdAsset != nil {

// set up container to hold media tracks.
var mixComposition = AVMutableComposition()
// track times
let track1to2Time = CMTimeAdd(firstAsset.duration, secondAsset.duration)
let totalTime = CMTimeAdd(track1to2Time, thirdAsset.duration)
// create separate video tracks for individual adjustments before merge
var firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)

var secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: firstAsset.duration,
error: nil)

var thirdTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
thirdTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, thirdAsset.duration),
ofTrack: thirdAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: track1to2Time,
error: nil)

// Set up an overall instructions array
var mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime)

// Create separate instructions for each track with a helper method to correct orientation.
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
// Make sure each track becomes transparent at end for the next one to play.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)
let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack, asset: thirdAsset)
// Add individual instructions to main for execution.
mainInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]
let mainComposition = AVMutableVideoComposition()
// Add instruction composition to main composition and set frame rate to 30 per second.
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(
width: UIScreen.mainScreen().bounds.width,
height: UIScreen.mainScreen().bounds.height)
// get audio
if audioAsset != nil {

let audioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)

audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, totalTime), ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)
}
// get path
var paths: NSArray = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)

let documentsDirectory: String = paths[0] as! String
let id = String(arc4random() % 1000)
let myPathDocs: String = documentsDirectory.stringByAppendingPathComponent("mergeVideo-\(id).mov")
let url = NSURL(fileURLWithPath: myPathDocs)
// make exporter
var exporter = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
exporter.outputURL = url
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = mainComposition
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter)
})
}
}
}

// MARK: Image Picker Methods
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
// Get the media type
let imageType: String = info[UIImagePickerControllerMediaType] as! String
let url: NSURL = info[UIImagePickerControllerMediaURL] as! NSURL
println(imageType)
// Place movie in queue.
if isSelectingAsset == 1 {

println("Success loading 1")
firstAsset = AVAsset.assetWithURL(url) as! AVAsset

} else if isSelectingAsset == 2 {
println("Success loading 2")
secondAsset = AVAsset.assetWithURL(url) as! AVAsset

} else if isSelectingAsset == 3 {
println("Success loading 3")
thirdAsset = AVAsset.assetWithURL(url) as! AVAsset
}

// Dismiss movie selection.
self.dismissViewControllerAnimated(true, completion: nil)
}

func mediaPicker(mediaPicker: MPMediaPickerController!, didPickMediaItems mediaItemCollection: MPMediaItemCollection!) {

let song: NSArray = [mediaItemCollection.items]
if song.count > 0 {

var selectedSong: MPMediaItem! = song[0][0] as! MPMediaItem
let url: NSURL = selectedSong.valueForProperty(MPMediaItemPropertyAssetURL) as! NSURL
audioAsset = AVAsset.assetWithURL(url) as! AVAsset
println("Audio loaded")
}
self.dismissViewControllerAnimated(true, completion: nil)
}

func mediaPickerDidCancel(mediaPicker: MPMediaPickerController!) {

self.dismissViewControllerAnimated(true, completion: nil)
}

// MARK: Merge Helper Methods
func exportDidFinish(session:AVAssetExportSession) {

assert(session.status == AVAssetExportSessionStatus.Completed, "Session status not completed")

if session.status == AVAssetExportSessionStatus.Completed {

let outputURL: NSURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL, completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in

dispatch_async(dispatch_get_main_queue(), { () -> Void in

if (error != nil) {

let alert = UIAlertView(title: "Error", message: "Failed to save video.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
} else {

let alert = UIAlertView(title: "Success", message: "Video saved.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
}
})
})
}
}
audioAsset = nil
firstAsset = nil
secondAsset = nil
thirdAsset = nil
}

// Identify the correct orientation for the output video based on the input.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {

var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}

func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {

// get the asset tracks current orientation
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
var transform = assetTrack.preferredTransform
// identify the needed orientation
let assetInfo = orientationFromTransform(transform)
// find the size needed to fit the track in the screen for landscape
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width

// if it is portrait, get the size to fit the track in the screen and return instruction to scale.
if assetInfo.isPortrait {

scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {

// If it is landscape then check for incorrect orientation and correct if needed, then return instruction to re-orient and scale.
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}

return instruction
}
}

Best Answer

Shouldn't you be setting the second track's opacity to 0.0 after the first + second track durations, instead of what you have now? The times passed to setOpacity(_:atTime:) are on the composition's timeline, not the clip's own timeline, so hiding the second track at secondAsset.duration makes it transparent for most (or all) of its own playback window, which is why it renders black. Change:

secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)

to:

secondInstruction.setOpacity(0.0, atTime: firstAsset.duration+secondAsset.duration) //(Untested, but the idea is there).
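As a minimal sketch of that idea (untested, and assuming the question's variables firstAsset, track1to2Time, and the three layer instructions are in scope): CMTime has no + operator in this Swift 1.x era code, but mergeMedia already computes the cumulative duration with CMTimeAdd as track1to2Time, so the fix can simply reuse it:

// Opacity times are on the composition timeline, so each track goes
// transparent exactly where the next clip's time range begins.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
// track1to2Time = CMTimeAdd(firstAsset.duration, secondAsset.duration)
secondInstruction.setOpacity(0.0, atTime: track1to2Time)
// thirdInstruction needs no fade-out; it is the last clip.

With those times, each track is visible only during its own insertion window, and the second clip no longer renders black.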

Regarding ios - Why does the second video play black after merging with AVFoundation?, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/32636814/
