- Java 双重比较
- java - 比较器与 Apache BeanComparator
- Objective-C 完成 block 导致额外的方法调用?
- database - RESTful URI 是否应该公开数据库主键?
我正在开发将多个视频与背景音轨结合在一起的应用程序。它还需要为不同的视频设置不同的音频级别。
以下是AssetItem类和AssetManager类的代码
// AssetItem Class
class AssetItem : NSObject {
var asset : Asset!
var assetEffect : AssetEffectType! // Enum
var assetSceneType : SceneType! // Enum
var videoLength : CMTime!
var animationLayer : AnyObject?
var volumeOfVideoVoice : Float = 0.0
var volumeOfBGMusic : Float = 0.0
override init() {
super.init()
}
}
// AssetManager Class implementation
class AssetManager{
var assetList = [AssetItem]()
var composition : AVMutableComposition! = AVMutableComposition()
var videoComposition : AVMutableVideoComposition? = AVMutableVideoComposition()
var audioMix : AVMutableAudioMix = AVMutableAudioMix()
var transitionDuration = CMTimeMakeWithSeconds(1, 600) // Default transitionDuration is 1 sec
var compositionTimeRanges : [NSValue] = [NSValue]()
var passThroughTimeRangeValue : [NSValue] = [NSValue]()
var transitionTimeRangeValue : [NSValue] = [NSValue]()
var videoTracks = [AVMutableCompositionTrack]()
var audioTracks = [AVMutableCompositionTrack]()
// MARK: - Constructor
override init() {
super.init()
let compositionTrackA = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let compositionTrackB = self.composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let compositionTrackAudioA = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let compositionTrackAudioB = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
self.videoTracks = [compositionTrackA, compositionTrackB]
self.audioTracks = [compositionTrackAudioA, compositionTrackAudioB]
}
func buildCompositionTrack(forExport : Bool ){
// This is the Method to Build Compositions
}
}
下面是BuildingCompositions的方法
func buildCompositionTrack(forExport : Bool) {
var cursorTIme = kCMTimeZero
var transitionDurationForEffect = kCMTimeZero
// Create a mutable composition instructions object
var videoCompositionInstructions = [AVMutableVideoCompositionInstruction]()
var audioMixInputParameters = [AVMutableAudioMixInputParameters]()
let timeRanges = calculateTimeRangeForAssetLayer()
self.passThroughTimeRangeValue = timeRanges.passThroughTimeRangeValue
self.transitionTimeRangeValue = timeRanges.transitionTimeRangeValue
let defaultMuteSoundTrackURL: NSURL = bundle.URLForResource("30sec", withExtension: "mp3")!
let muteSoundTrackAsset = AVURLAsset(URL: defaultMuteSoundTrackURL, options: nil)
let muteSoundTrack = muteSoundTrackAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
for (index,assetItem) in self.assetsList.enumerate() {
let trackIndex = index % 2
let assetVideoTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeVideo)[0]
let timeRange = CMTimeRangeMake(kCMTimeZero, assetItem.videoLength)
do {
try self.videoTracks[trackIndex].insertTimeRange(timeRange, ofTrack: assetVideoTrack, atTime: cursorTime)
} catch let error1 as NSError {
error = error1
}
if error != nil {
print("Error: buildCompositionTracks for video with parameter index: %@ and VideoCounts: %@ error: %@", ["\(index)", "\(self.assetsList.count)", "\(error?.description)"])
error = nil
}
if assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio).count > 0 {
let clipAudioTrack = assetItem.asset.movieAsset.tracksWithMediaType(AVMediaTypeAudio)[0]
do {
try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: clipAudioTrack, atTime: cursorTime)
} catch let error1 as NSError {
error = error1
}
}else {
do {
try audioTracks[trackIndex].insertTimeRange(timeRange, ofTrack: muteSoundTrack, atTime: cursorTime)
}catch let error1 as NSError {
error = error1
}
}
// The end of this clip will overlap the start of the next by transitionDuration.
// (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
if assetItem.assetEffect == FLIXAssetEffectType.Default {
transitionDurationForEffect = kCMTimeZero
let timeRange = CMTimeRangeMake(cursorTime, assetItem.videoLength)
self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
} else {
transitionDurationForEffect = self.transitionDuration
let timeRange = CMTimeRangeMake(cursorTime, CMTimeSubtract(assetItem.videoLength, transitionDurationForEffect))
self.compositionTimeRanges.append(NSValue(CMTimeRange: timeRange))
cursorTime = CMTimeAdd(cursorTime, assetItem.videoLength)
cursorTime = CMTimeSubtract(cursorTime, transitionDurationForEffect)
}
videoCompositionInstructions.appendContentsOf(self.buildCompositionInstructions( index, assetItem : assetItem))
}
if self.project.hasProjectMusicTrack() && self.backgroundMusicTrack != nil {
let url: NSURL = bundle.URLForResource("Music9", withExtension: "mp3")!
bgMusicSound = AVURLAsset(URL: url, options: nil)
backgroundAudioTrack = bgMusicSound.tracksWithMediaType(AVMediaTypeAudio)[0]
let compositionBackgroundTrack = self.composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))
let soundDuration = CMTimeCompare(bgMusicSound.duration, self.composition.duration)
if soundDuration == -1 {
let bgMusicSoundTimeRange = CMTimeRangeMake(kCMTimeZero, bgMusicSound.duration)
let noOftimes = Int(CMTimeGetSeconds(self.composition.duration) / CMTimeGetSeconds(bgMusicSound.duration))
let remainingTime = CMTimeGetSeconds(self.composition.duration) % CMTimeGetSeconds(bgMusicSound.duration)
var musicCursorTime = kCMTimeZero
for _ in 0..<noOftimes {
do {
try compositionBackgroundTrack.insertTimeRange(bgMusicSoundTimeRange, ofTrack: backgroundAudioTrack, atTime: musicCursorTime)
} catch let error1 as NSError {
error = error1
}
musicCursorTime = CMTimeAdd(bgMusicSound.duration, musicCursorTime)
}
}
let backgroundMusciMixInputParameters = AVMutableAudioMixInputParameters(track: compositionBackgroundTrack)
backgroundMusciMixInputParameters.trackID = compositionBackgroundTrack.trackID
// setting up music levels for background music track.
for index in 0 ..< Int(self.compositionTimeRanges.count) {
let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
let scene = self.assetsList[index].assetSceneType
let volumeOfBGMusic = self.assetsList[index].volumeOfBGMusic
var nextvolumeOfBGMusic : Float = 0.0
if let nextAsset = self.assetsList[safe: index + 1] {
nextvolumeOfBGMusic = nextAsset.volumeOfBGMusic
}
backgroundMusciMixInputParameters.setVolume(volumeOfBGMusic, atTime: timeRange.start)
backgroundMusciMixInputParameters.setVolumeRampFromStartVolume(volumeOfBGMusic, toEndVolume: nextvolumeOfBGMusic, timeRange: CMTimeRangeMake(CMTimeSubtract(timeRange.end,CMTimeMake(2, 1)), CMTimeMake(2, 1)))
}
audioMixInputParameters.append(backgroundMusciMixInputParameters)
} // End of If for ProjectMusic Check
for (index, assetItem) in self.assetsList.enumerate(){
let trackIndex = index % 2
let timeRange = self.compositionTimeRanges[index].CMTimeRangeValue
let sceneType = assetItem.assetSceneType
let volumnOfVideoMusic = assetItem.volumeOfVideoVoice
let audioTrackParamater = AVMutableAudioMixInputParameters(track: self.audioTracks[trackIndex])
audioTrackParamater.trackID = self.audioTracks[trackIndex].trackID
audioTrackParamater.setVolume(0.0, atTime: kCMTimeZero ) // Statement 1
audioTrackParamater.setVolume(volumnOfVideoMusic, atTime: timeRange.start) // Statement 2
audioTrackParamater.setVolume(0.0, atTime: timeRange.end) // statement 3
audioMixInputParameters.append(audioTrackParamater)
}
self.audioMix.inputParameters = audioMixInputParameters
self.composition.naturalSize = self.videoRenderSize
self.videoComposition!.instructions = videoCompositionInstructions
self.videoComposition!.renderSize = self.videoRenderSize
self.videoComposition!.frameDuration = CMTimeMake(1, 30)
self.videoComposition!.renderScale = 1.0 // This is a iPhone only option.
}
在上面的代码中,背景音乐级别设置正确,但视频轨道的音频级别出现问题。我添加了 DebugView 来帮助调试组合,在调试 View 中一切看起来都很完美,但除了背景音乐轨道之外,视频的音频听不见了。我做错了什么吗?
如果我从上面的代码中删除语句 1,那么它是可以听到的,但现在它们在 1.0 级别都可以听到,并且不遵守设置的级别。
最佳答案
看起来您正在为 AVMutableCompositionTrack 中的每个“剪辑”使用新的 AVAudioMixInputParameters 对象。这是行不通的:新对象将与具有相同轨道 ID 的先前对象发生冲突。
您应该为每个 AVMutableCompositionTrack 使用单个 AVAudioMixInputParameters 对象,并使用每个所需的斜坡值更新该对象。
关于ios - AVMutableAudioMixInputParameters 未正确应用于 AudioMiX,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/38299620/
首先,我不确定这是否是报告此问题的正确位置,但在任何地方都找不到任何信息或任何类型的文档。 几天(也许几周)前,Android Vitals 开始提醒我们 Google Play 仪表板中的“不良行为
我想在我的应用程序中使用音频混合器,它接收来自不同来源的音频,并应在扬声器中一起播放它们。 我的最终应用程序应该执行类似以下命令的操作: gst-launch-1.0 audiomixer name=
我正在开发将多个视频与背景音轨结合在一起的应用程序。它还需要为不同的视频设置不同的音频级别。 以下是AssetItem类和AssetManager类的代码 // AssetItem Class cl
问题:尝试创建一个应用到 AVPlayerItem 的 Mix,但它在点击 setVolumeRampFromStartVolume 时因 ECX_BAD_ACCESS 而崩溃: 代码: AVMuta
我正在构建一个应用程序,它结合了几个视频,将它们合并为一个并在视频中放置了自定义音轨。这一切都完美无缺。现在我想淡出我的音频。另一方面,这是行不通的,我不知道为什么。这是我的代码: let durat
我是一名优秀的程序员,十分优秀!