ios - AVMutableAudioMixInputParameters not correctly applied to the AudioMix


I am developing an app that combines multiple videos with a background music track. It also needs to set different audio levels for different videos.

Here is the code for the AssetItem class and the AssetManager class:

// AssetItem Class
class AssetItem : NSObject {

    var asset : Asset!
    var assetEffect : AssetEffectType!  // Enum
    var assetSceneType : SceneType!     // Enum
    var videoLength : CMTime!
    var animationLayer : AnyObject?
    var volumeOfVideoVoice : Float = 0.0
    var volumeOfBGMusic : Float = 0.0

    override init() {
        super.init()
    }
}

// AssetManager Class implementation

class AssetManager : NSObject {

    var assetsList = [AssetItem]()
    var composition : AVMutableComposition! = AVMutableComposition()
    var videoComposition : AVMutableVideoComposition? = AVMutableVideoComposition()
    var audioMix : AVMutableAudioMix = AVMutableAudioMix()

    var transitionDuration = CMTimeMakeWithSeconds(1, 600) // Default transitionDuration is 1 sec

    var compositionTimeRanges : [NSValue] = [NSValue]()

    var passThroughTimeRangeValue : [NSValue] = [NSValue]()
    var transitionTimeRangeValue : [NSValue] = [NSValue]()

    var videoTracks = [AVMutableCompositionTrack]()
    var audioTracks = [AVMutableCompositionTrack]()

    // MARK: - Constructor
    override init() {

        super.init()

        let compositionTrackA = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
        let compositionTrackB = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        let compositionTrackAudioA = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
        let compositionTrackAudioB = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        self.videoTracks = [compositionTrackA, compositionTrackB]
        self.audioTracks = [compositionTrackAudioA, compositionTrackAudioB]
    }

    func buildCompositionTrack(forExport : Bool) {
        // This is the method that builds the compositions
    }
}

Here is the buildCompositionTrack method:

func buildCompositionTrack(forExport : Bool) {

    var cursorTime = kCMTimeZero
    var transitionDurationForEffect = kCMTimeZero
    var error : NSError?

    // Create a mutable composition instructions object
    var videoCompositionInstructions = [AVMutableVideoCompositionInstruction]()
    var audioMixInputParameters = [AVMutableAudioMixInputParameters]()

    let timeRanges = calculateTimeRangeForAssetLayer()
    self.passThroughTimeRangeValue = timeRanges.passThroughTimeRangeValue
    self.transitionTimeRangeValue = timeRanges.transitionTimeRangeValue

    let defaultMuteSoundTrackURL: NSURL = bundle.URLForResource("30sec", withExtension: "mp3")!
    let muteSoundTrackAsset = AVURLAsset(URL: defaultMuteSoundTrackURL, options: nil)
    let muteSoundTrack = muteSoundTrackAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

    for (index, assetItem) in self.assetsList.enumerate() {
        let trackIndex = index % 2

        let assetVideoTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeVideo)[0]

        let timeRange = CMTimeRangeMake(kCMTimeZero, assetItem.videoLength)

        do {
            try self.videoTracks[trackIndex].insertTimeRange(timeRange, ofTrack: assetVideoTrack, atTime: cursorTime)
        } catch let error1 as NSError {
            error = error1
        }
        if error != nil {
            print("Error: buildCompositionTracks for video with parameter index: %@ and VideoCounts: %@ error: %@", ["\(index)", "\(self.assetsList.count)", "\(error?.description)"])
            error = nil
        }

        if assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio).count > 0 {
            let clipAudioTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio)[0]

            do {
                try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: clipAudioTrack, atTime: cursorTime)
            } catch let error1 as NSError {
                error = error1
            }
        } else {
            do {
                try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: muteSoundTrack, atTime: cursorTime)
            } catch let error1 as NSError {
                error = error1
            }
        }

        // The end of this clip will overlap the start of the next by transitionDuration.
        // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)

        if assetItem.assetEffect == .Default {

            transitionDurationForEffect = kCMTimeZero
            let timeRange = CMTimeRangeMake(cursorTime, assetItem.videoLength)
            self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
            cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
        } else {
            transitionDurationForEffect = self.transitionDuration
            let timeRange = CMTimeRangeMake(cursorTime, CMTimeSubtract(assetItem.videoLength, transitionDurationForEffect))
            self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
            cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
            cursorTime = CMTimeSubtract(cursorTime, transitionDurationForEffect)
        }

        videoCompositionInstructions.appendContentsOf(self.buildCompositionInstructions(index, assetItem: assetItem))

    }

    if self.project.hasProjectMusicTrack() && self.backgroundMusicTrack != nil {

        let url: NSURL = bundle.URLForResource("Music9", withExtension: "mp3")!
        bgMusicSound = AVURLAsset(URL: url, options: nil)
        backgroundAudioTrack = bgMusicSound.tracksWithMediaType(AVMediaTypeAudio)[0]

        let compositionBackgroundTrack = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))

        let soundDuration = CMTimeCompare(bgMusicSound.duration, self.composition.duration)

        if soundDuration == -1 {

            let bgMusicSoundTimeRange = CMTimeRangeMake(kCMTimeZero, bgMusicSound.duration)

            let noOfTimes = Int(CMTimeGetSeconds(self.composition.duration) / CMTimeGetSeconds(bgMusicSound.duration))
            let remainingTime = CMTimeGetSeconds(self.composition.duration) % CMTimeGetSeconds(bgMusicSound.duration)

            var musicCursorTime = kCMTimeZero

            for _ in 0..<noOfTimes {

                do {
                    try compositionBackgroundTrack.insertTimeRange(bgMusicSoundTimeRange, ofTrack: backgroundAudioTrack, atTime: musicCursorTime)
                } catch let error1 as NSError {
                    error = error1
                }
                musicCursorTime = CMTimeAdd(bgMusicSound.duration, musicCursorTime)
            }
        }

        let backgroundMusicMixInputParameters = AVMutableAudioMixInputParameters(track: compositionBackgroundTrack)
        backgroundMusicMixInputParameters.trackID = compositionBackgroundTrack.trackID

        // Setting up music levels for the background music track.

        for index in 0 ..< self.compositionTimeRanges.count {

            let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
            let scene = self.assetsList[index].assetSceneType

            let volumeOfBGMusic = self.assetsList[index].volumeOfBGMusic
            var nextVolumeOfBGMusic : Float = 0.0

            if let nextAsset = self.assetsList[safe: index + 1] {
                nextVolumeOfBGMusic = nextAsset.volumeOfBGMusic
            }
            backgroundMusicMixInputParameters.setVolume(volumeOfBGMusic, atTime: timeRange.start)
            backgroundMusicMixInputParameters.setVolumeRampFromStartVolume(volumeOfBGMusic, toEndVolume: nextVolumeOfBGMusic, timeRange: CMTimeRangeMake(CMTimeSubtract(timeRange.end, CMTimeMake(2, 1)), CMTimeMake(2, 1)))
        }

        audioMixInputParameters.append(backgroundMusicMixInputParameters)

    } // End of the background-music check


    for (index, assetItem) in self.assetsList.enumerate() {

        let trackIndex = index % 2

        let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
        let sceneType = assetItem.assetSceneType
        let volumeOfVideoVoice = assetItem.volumeOfVideoVoice

        let audioTrackParameter = AVMutableAudioMixInputParameters(track: self.audioTracks[trackIndex])

        audioTrackParameter.trackID = self.audioTracks[trackIndex].trackID
        audioTrackParameter.setVolume(0.0, atTime: kCMTimeZero)                    // Statement 1
        audioTrackParameter.setVolume(volumeOfVideoVoice, atTime: timeRange.start) // Statement 2
        audioTrackParameter.setVolume(0.0, atTime: timeRange.end)                  // Statement 3
        audioMixInputParameters.append(audioTrackParameter)
    }

    self.audioMix.inputParameters = audioMixInputParameters
    self.composition.naturalSize = self.videoRenderSize
    self.videoComposition!.instructions = videoCompositionInstructions
    self.videoComposition!.renderSize = self.videoRenderSize
    self.videoComposition!.frameDuration = CMTimeMake(1, 30)
    self.videoComposition!.renderScale = 1.0 // This is an iPhone-only option.

}

In the code above, the background music levels are applied correctly, but the audio levels of the video tracks are not. I added a debug view to help inspect the composition, and everything looks perfect there, yet apart from the background music track the videos' audio is inaudible. Am I doing something wrong?

If I remove Statement 1 from the code above, the video audio becomes audible, but then every clip plays at level 1.0 and the levels I set are not respected.

DebugView of Compositions and Audio Tracks

Best Answer

It looks like you are creating a new AVAudioMixInputParameters object for every "clip" in the AVMutableCompositionTrack. That will not work: each new object conflicts with the earlier objects that carry the same track ID.

You should use a single AVAudioMixInputParameters object per AVMutableCompositionTrack, and apply every required volume ramp to that one object.
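A minimal sketch of that approach, written in the same Swift 2 style as the question; it assumes the question's audioTracks, assetsList, compositionTimeRanges, and audioMixInputParameters, and would replace the final per-clip loop:

    // One parameters object per composition audio track; every clip's
    // volume changes accumulate on that single object instead of
    // being spread over conflicting per-clip objects.
    let trackParameters = self.audioTracks.map { track -> AVMutableAudioMixInputParameters in
        let parameters = AVMutableAudioMixInputParameters(track: track)
        parameters.trackID = track.trackID
        return parameters
    }

    for (index, assetItem) in self.assetsList.enumerate() {
        let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
        let parameters = trackParameters[index % 2] // same A/B alternation as the track inserts

        // Bring this clip's audio up at its start and back down at its end.
        parameters.setVolume(assetItem.volumeOfVideoVoice, atTime: timeRange.start)
        parameters.setVolume(0.0, atTime: timeRange.end)
    }

    // Keep the background-music parameters object appended earlier, and add
    // exactly one parameters object per video-audio track.
    audioMixInputParameters.appendContentsOf(trackParameters)
    self.audioMix.inputParameters = audioMixInputParameters

With this structure each composition track is represented once in the audio mix, so the per-clip volume settings no longer overwrite one another.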

Regarding "ios - AVMutableAudioMixInputParameters not correctly applied to the AudioMix", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/38299620/
