I can't set the audio volume when mixing a recorded video with audio from a bundled resource.
Here is my code:
// (videoAsset, audioTime, videoError and finalVideoWithAudioPath are set up elsewhere.)
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
NSString *resourcePath = [[NSBundle mainBundle] pathForResource:@"give-it-away" ofType:@"mp3"];
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:resourcePath] options:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], AVURLAssetPreferPreciseDurationAndTimingKey, nil]];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:[videoAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:nil];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioTime) ofTrack:[audioAsset.tracks objectAtIndex:0] atTime:kCMTimeZero error:&videoError];
AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
[audioInputParams setVolume:0.3 atTime:kCMTimeZero];
[audioInputParams setTrackID:audioTrack.trackID];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:audioInputParams];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
exportSession.outputURL = [NSURL fileURLWithPath:finalVideoWithAudioPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.audioMix = audioMix;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        case AVAssetExportSessionStatusFailed:
            [self performSelectorOnMainThread:@selector(doPostExportFailed) withObject:nil waitUntilDone:NO];
            break;
        case AVAssetExportSessionStatusCompleted:
            [self performSelectorOnMainThread:@selector(doPostExportSuccess) withObject:nil waitUntilDone:YES];
            break;
    }
}];
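A note on the snippet above: AVAssetExportPresetPassthrough copies the media samples without re-encoding them, and an audio mix can only be applied while the audio is being re-encoded, so the volume set in audioInputParams is most likely being ignored. A minimal sketch of that one change, reusing the variables from the code above:

// Use a re-encoding preset so the audioMix actually takes effect.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = [NSURL fileURLWithPath:finalVideoWithAudioPath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.audioMix = audioMix; // applied during re-encoding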
Best Answer
Update (December 2018)
This code works for me on iOS 12.1.2:
- (void)combineAudio:(NSString *)audioPath forRecord:(VideoRecord *)record isResource:(BOOL)isResource isSilent:(BOOL)isSilent keepCurrentAudio:(BOOL)keepCurrentAudio withCompletionHandler:(void (^)(AVAssetExportSession *exportSession, NSString *exportPath))handler {
    NSString *resourcePath = audioPath;
    if (isResource) {
        // Bundled resources are passed by name; resolve to a full path.
        resourcePath = [[NSBundle mainBundle] pathForResource:resourcePath ofType:@"mp3"];
    }
    NSURL *url = [NSURL fileURLWithPath:resourcePath];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    // fileURLWithPath: handles spaces and escaping that a hand-built file:// string would not.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:record.videoPath] options:nil];
    AVMutableComposition *composition = [self getComposition]; // helper elsewhere in the class
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
    // Optionally keep the video's existing audio on its own track so it can be mixed separately.
    AVMutableCompositionTrack *compositionAudioOriginalTrack = nil;
    if (keepCurrentAudio && [videoAsset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
        compositionAudioOriginalTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioOriginalTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                atTime:kCMTimeZero error:nil];
    }
    if (isResource) {
        CMTime videoDuration = videoAsset.duration;
        if (CMTimeCompare(videoDuration, audioAsset.duration) <= 0) {
            // Audio is at least as long as the video: use only its first videoDuration.
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
                                           ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                            atTime:kCMTimeZero error:nil];
        } else {
            // Audio is shorter than the video: loop it until the video is covered.
            CMTime currentTime = kCMTimeZero;
            while (YES) {
                CMTime audioDuration = audioAsset.duration;
                CMTime totalDuration = CMTimeAdd(currentTime, audioDuration);
                if (CMTimeCompare(totalDuration, videoDuration) == 1) {
                    // Last pass: insert only what remains until the end of the video.
                    audioDuration = CMTimeSubtract(videoDuration, currentTime);
                }
                [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioDuration)
                                               ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                                atTime:currentTime error:nil];
                currentTime = CMTimeAdd(currentTime, audioDuration);
                if (CMTimeCompare(currentTime, videoDuration) >= 0) {
                    break;
                }
            }
        }
    } else {
        // Audio comes from an arbitrary file: insert it once for the duration of the video.
        NSArray<AVAssetTrack *> *aTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
        if (aTracks.count > 0) {
            [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                           ofTrack:[aTracks objectAtIndex:0]
                                            atTime:kCMTimeZero error:nil];
        }
    }
    // Add the video track.
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
    if (tracks.count == 0) {
        CLSNSLog(@"%@ - combineAudio - video tracks zero", NSStringFromClass([self class]));
        // TODO - Handle this error.
        return;
    }
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:[tracks objectAtIndex:0]
                                    atTime:kCMTimeZero error:nil];
    // A re-encoding preset is required: the audio mix is ignored with AVAssetExportPresetPassthrough.
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:[composition copy]
                                                                          presetName:AVAssetExportPresetHighestQuality];
    // Derive the export path from the source path, normalizing the extension to .mp4.
    NSString *exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mp4" withString:@"_audio_added.mp4"];
    if ([record.videoPath containsString:@".MOV"]) {
        exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".MOV" withString:@"_audio_added.mp4"];
    } else if ([record.videoPath containsString:@".mov"]) {
        exportPath = [record.videoPath stringByReplacingOccurrencesOfString:@".mov" withString:@"_audio_added.mp4"];
    }
    NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    }
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    float volume = .5f;
    if (keepCurrentAudio) {
        volume = .6f;
    }
    // Volume for the newly added audio track. Instead of setVolume:atTime: (which is what
    // failed in the question), a flat volume ramp over the whole duration is used.
    AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
    [audioInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : volume) toEndVolume:(isSilent ? .0f : volume) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
    [audioInputParams setTrackID:compositionAudioTrack.trackID];
    NSArray *inputParams = [NSArray arrayWithObject:audioInputParams];
    AVMutableAudioMixInputParameters *audioOriginalInputParams = nil;
    if (keepCurrentAudio) {
        // Duck the video's original audio well below the new track.
        audioOriginalInputParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioOriginalTrack];
        [audioOriginalInputParams setVolumeRampFromStartVolume:(isSilent ? .0f : .06f) toEndVolume:(isSilent ? .0f : .06f) timeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)];
        [audioOriginalInputParams setTrackID:compositionAudioOriginalTrack.trackID];
        inputParams = [NSArray arrayWithObjects:audioInputParams, audioOriginalInputParams, nil];
    }
    audioMix.inputParameters = inputParams;
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.audioMix = audioMix;
    _assetExport.outputURL = exportUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        handler(_assetExport, exportPath);
    }];
}
// Example call (note the keepCurrentAudio: argument required by the signature above):
[[FBVideoEditor shared] combineAudio:sound forRecord:self.record isResource:YES isSilent:NO keepCurrentAudio:NO withCompletionHandler:^(AVAssetExportSession *exportSession, NSString *exportPath) {
    switch (exportSession.status) {
        case AVAssetExportSessionStatusCancelled:
            CLSNSLog(@"AVAssetExportSessionStatusCancelled");
            break;
        case AVAssetExportSessionStatusExporting:
            CLSNSLog(@"AVAssetExportSessionStatusExporting");
            break;
        case AVAssetExportSessionStatusUnknown:
            CLSNSLog(@"AVAssetExportSessionStatusUnknown");
            break;
        case AVAssetExportSessionStatusWaiting:
            CLSNSLog(@"AVAssetExportSessionStatusWaiting");
            break;
        case AVAssetExportSessionStatusFailed:
            CLSNSLog(@"Export failed with error message: %@", exportSession.error.userInfo);
            break;
        case AVAssetExportSessionStatusCompleted:
            // Success
            break;
    }
}];
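For reference, the loop in the accepted answer that repeats a short audio resource across the length of the video can be factored into a small standalone helper. A sketch using the same CoreMedia APIs; the function name FBTileAudio is hypothetical:

// Hypothetical helper: tile audioAsset onto track until videoDuration is covered.
static void FBTileAudio(AVMutableCompositionTrack *track,
                        AVURLAsset *audioAsset,
                        CMTime videoDuration) {
    AVAssetTrack *source = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (source == nil) { return; }
    CMTime cursor = kCMTimeZero;
    while (CMTimeCompare(cursor, videoDuration) < 0) {
        // Clamp the last copy so the audio never runs past the end of the video.
        CMTime remaining = CMTimeSubtract(videoDuration, cursor);
        CMTime chunk = CMTimeCompare(audioAsset.duration, remaining) < 0 ? audioAsset.duration
                                                                         : remaining;
        NSError *error = nil;
        [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, chunk)
                       ofTrack:source
                        atTime:cursor
                         error:&error];
        if (error != nil) { break; }
        cursor = CMTimeAdd(cursor, chunk);
    }
}

Clamping the final chunk with CMTimeSubtract(videoDuration, cursor) keeps the looped audio from running past the end of the video, and the same code also handles the case where the audio and video durations are exactly equal.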
Regarding "ios - AVMutableCompositionTrack setVolume not working", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/28767502/