
ios - AVAssetExportSession doesn't work at runtime


I want to merge a video file and an audio file. My code works the way I want when I step through it in debug mode, but it does not work in a normal run. I suspect the problem is with the exportAsynchronously function and that I am accessing some values before they have loaded. Here is my code.

The function that merges video and audio:

func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL,
                        shouldFlipHorizontally: Bool = false,
                        completion: @escaping (_ error: Error?, _ url: URL?) -> Void) {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    // start merge

    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)

    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)

    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.video)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaType.audio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaType.audio)[0]

    // Default must have transformation
    compositionAddVideo?.preferredTransform = aVideoAssetTrack.preferredTransform

    if shouldFlipHorizontally {
        // Flip video horizontally
        var frontalTransform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
        frontalTransform = frontalTransform.translatedBy(x: -aVideoAssetTrack.naturalSize.width, y: 0.0)
        frontalTransform = frontalTransform.translatedBy(x: 0.0, y: -aVideoAssetTrack.naturalSize.width)
        compositionAddVideo?.preferredTransform = frontalTransform
    }

    mutableCompositionVideoTrack.append(compositionAddVideo!)
    mutableCompositionAudioTrack.append(compositionAddAudio!)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo!)

    do {
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: kCMTimeZero)

        // In my case my audio file is longer than the video file, so I took the videoAsset duration
        // instead of the audioAsset duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                            aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: kCMTimeZero)

        // adding the audio track of the video (if it exists) to the final composition
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero,
                                                                                       aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: kCMTimeZero)
        }
    } catch {
        print(error.localizedDescription)
    }

    // Exporting
    let savePathUrl: URL = URL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
    do { // delete old video
        try FileManager.default.removeItem(at: savePathUrl)
    } catch { print(error.localizedDescription) }

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = savePathUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {
        case AVAssetExportSessionStatus.completed:
            print("success")
            completion(nil, savePathUrl)
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
            completion(assetExport.error, nil)
        default:
            print("complete")
            completion(assetExport.error, nil)
        }
    }
}

Where I call the function:

@objc func touchCameraButton() {
    self.mergeVideoAndAudio(videoUrl: videoFileURL, audioUrl: soundFileURL!, completion: { (err, url) in print("fuck") })

    if isVideoRecordingOn {

        if isFirstTouch {
            startCapture()
            recordAudio()
            isFirstTouch = false
        } else {
            startCapture()
            recordAudio()
            self.mergeVideoAndAudio(videoUrl: videoFileURL, audioUrl: soundFileURL!, shouldFlipHorizontally: true, completion: { (err, url) in
                DispatchQueue.main.async {
                    self.videoWithAudioURL = url
                    if err != nil {
                        print(err?.localizedDescription ?? "Error in merging video and audio")
                    }
                    self.isFirstTouch = true
                }
            })
        }
    }
}
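
As a side note on the guess above that some values are read before they have loaded: one way to rule that out is to load the asset's "tracks" key asynchronously before building the composition. This is only a minimal sketch, not part of the original post, and it assumes the same videoUrl that mergeVideoAndAudio receives:

    import AVFoundation

    // Sketch: pre-load the "tracks" key so that aVideoAsset.tracks(withMediaType:)
    // is only read after the asset has finished loading it.
    let aVideoAsset = AVAsset(url: videoUrl)
    aVideoAsset.loadValuesAsynchronously(forKeys: ["tracks", "duration"]) {
        var loadError: NSError?
        let status = aVideoAsset.statusOfValue(forKey: "tracks", error: &loadError)
        DispatchQueue.main.async {
            if status == .loaded {
                // Safe to read aVideoAsset.tracks(withMediaType:) and build the composition here
            } else {
                print("tracks not loaded: \(loadError?.localizedDescription ?? "unknown error")")
            }
        }
    }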

Best Answer

The export session runs its completion handler on a background thread, so everything you do in that handler (including calling your own completion and updating UI or view-controller state) has to be performed on the main thread. That would also explain why it appears to work while stepping through in the debugger but not in a normal run.

You just need to do the following:

    assetExport.exportAsynchronously { () -> Void in
        DispatchQueue.main.async {
            switch assetExport.status {
            case AVAssetExportSessionStatus.completed:
                print("success")
                completion(nil, savePathUrl)
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport.error?.localizedDescription ?? "error nil")")
                completion(assetExport.error, nil)
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport.error?.localizedDescription ?? "error nil")")
                completion(assetExport.error, nil)
            default:
                print("complete")
                completion(assetExport.error, nil)
            }
        }
    }
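
With the dispatch onto the main queue moved inside exportAsynchronously as above, the completion callback already arrives on the main thread, so the extra DispatchQueue.main.async at the call site in the question becomes redundant (though harmless). A possible simplified call, reusing the properties from the question (videoFileURL, soundFileURL, videoWithAudioURL, isFirstTouch):

    self.mergeVideoAndAudio(videoUrl: videoFileURL,
                            audioUrl: soundFileURL!,
                            shouldFlipHorizontally: true) { err, url in
        // Already on the main queue here, so updating view-controller state is safe
        self.videoWithAudioURL = url
        if let err = err {
            print(err.localizedDescription)
        }
        self.isFirstTouch = true
    }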

Regarding "ios - AVAssetExportSession doesn't work at runtime", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/51257501/
