ios - AVAssetReader/AVAssetWriter joining mp4 files of different resolutions

I'm writing an iPad application in which I need to join mp4 files of different resolutions. To do this I use an AVAssetReader to read the mp4 source files and an AVAssetWriter to write them into a single mp4 output file.

I tried AVAssetExportSession first, but the problem I ran into was that there were black frames between the joined files.
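For reference, here is a minimal sketch (not my exact code) of that AVAssetExportSession approach: each source asset is appended to an AVMutableComposition and the whole composition is exported in one pass. The videoURLs, outputURL, and preset names are assumptions; they mirror the parameters of the selector below.

    AVMutableComposition *composition = [AVMutableComposition composition];
    CMTime cursor = kCMTimeZero;
    NSError *error = nil;

    for (NSURL *videoURL in videoURLs)
    {
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
        // Append the full duration of this asset right after the previous one.
        [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofAsset:asset atTime:cursor error:&error];
        cursor = CMTimeAdd(cursor, asset.duration);
    }

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeMPEG4;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // When exportSession.status is AVAssetExportSessionStatusCompleted,
        // the joined file is at outputURL.
    }];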

The problem I'm facing now is that everything seems to work, but the AVAssetWriter's completion handler is never called.

Here is my selector, which takes a list of mp4 file URLs, a single output file URL, and a completion handler as input.

- (void)resizeAndJoinVideosAtURLs:(NSArray *)videoURLs toOutputURL:(NSURL *)outputURL withHandler:(void(^)(NSURL *fileURL))handler
{
    /*
     First step: create the writer and writer input
     */
    NSError *error = nil;
    self.videoAssetWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeMPEG4 error:&error];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:640], AVVideoWidthKey, [NSNumber numberWithInt:480], AVVideoHeightKey, nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    videoWriterInput.expectsMediaDataInRealTime = NO;

    if([self.videoAssetWriter canAddInput:videoWriterInput])
    {
        [self.videoAssetWriter addInput:videoWriterInput];
        [self.videoAssetWriter startWriting];
        [self.videoAssetWriter startSessionAtSourceTime:kCMTimeZero];

        /*
         Second step: for each video URL given, create a reader and a reader output
         */
        for(NSURL *videoURL in videoURLs)
        {
            NSLog(@"Processing file: %@", videoURL);
            AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
            AVAssetReader *videoAssetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&error];
            AVAssetTrack *videoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
            NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

            AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
            videoAssetTrackOutput.alwaysCopiesSampleData = NO;

            if([videoAssetReader canAddOutput:videoAssetTrackOutput])
            {
                [videoAssetReader addOutput:videoAssetTrackOutput];
                [videoAssetReader startReading];

                /*
                 Step three: copy the buffers from the reader to the writer
                 */
                while ([videoAssetReader status] == AVAssetReaderStatusReading)
                {
                    if(![videoWriterInput isReadyForMoreMediaData]) continue;

                    CMSampleBufferRef buffer = [videoAssetTrackOutput copyNextSampleBuffer];
                    if(buffer)
                    {
                        [videoWriterInput appendSampleBuffer:buffer];
                        CFRelease(buffer);
                    }
                }

            } else NSLog(@"ERROR: %@", error);
        }

        [videoWriterInput markAsFinished];

    } else NSLog(@"ERROR: %@", error);

    __weak ClipBuilder *weakself = self;
    [self.videoAssetWriter finishWritingWithCompletionHandler:^{
        handler(outputURL);
        weakself.videoAssetWriter = nil;
    }];
}

My output file exists, and the AVAssetWriter exists since it is a property, but the completion handler is still never called. What could explain this?

Thanks for your help.


Best Answer

Here is the solution I finally implemented to join mp4 files of different resolutions using AVAssetReader/AVAssetWriter.

- (void)reencodeComposition:(AVComposition *)composition toMP4File:(NSURL *)mp4FileURL withCompletionHandler:(void (^)(void))handler
{
    self.status = EncoderStatusEncoding;

    /*
     Create the asset writer to write the file on disk
     */
    NSError *error = nil;
    if([[NSFileManager defaultManager] fileExistsAtPath:mp4FileURL.path isDirectory:nil])
    {
        if(![[NSFileManager defaultManager] removeItemAtPath:mp4FileURL.path error:&error])
        {
            [self failWithError:error withCompletionHandler:handler];
            return;
        }
    }

    self.assetWriter = [[AVAssetWriter alloc] initWithURL:mp4FileURL fileType:AVFileTypeMPEG4 error:&error];

    if(self.assetWriter)
    {
        /*
         Get the audio and video track of the composition
         */
        AVAssetTrack *videoAssetTrack = [composition tracksWithMediaType:AVMediaTypeVideo].firstObject;
        AVAssetTrack *audioAssetTrack = [composition tracksWithMediaType:AVMediaTypeAudio].firstObject;

        NSDictionary *videoSettings = @{AVVideoCodecKey:AVVideoCodecH264, AVVideoWidthKey:@(self.imageWidth), AVVideoHeightKey:@(self.imageHeight)};

        /*
         Add an input to be able to write the video in the file
         */
        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;

        if([self.assetWriter canAddInput:videoWriterInput])
        {
            [self.assetWriter addInput:videoWriterInput];

            /*
             Add an input to be able to write the audio in the file
             */
            // Use this only if you know the format
            // CMFormatDescriptionRef audio_fmt_desc_ = nil;
            //
            // AudioStreamBasicDescription audioFormat;
            // bzero(&audioFormat, sizeof(audioFormat));
            // audioFormat.mSampleRate = 44100;
            // audioFormat.mFormatID = kAudioFormatMPEG4AAC;
            // audioFormat.mFramesPerPacket = 1024;
            // audioFormat.mChannelsPerFrame = 2;
            // int bytes_per_sample = sizeof(float);
            // audioFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
            //
            // audioFormat.mBitsPerChannel = bytes_per_sample * 8;
            // audioFormat.mBytesPerPacket = bytes_per_sample * 2;
            // audioFormat.mBytesPerFrame = bytes_per_sample * 2;
            //
            // CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &audioFormat, 0, NULL, 0, NULL, NULL, &audio_fmt_desc_);
            //
            // AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:audio_fmt_desc_];

            AVAssetWriterInput *audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil sourceFormatHint:((__bridge CMAudioFormatDescriptionRef)audioAssetTrack.formatDescriptions.firstObject)];

            audioWriterInput.expectsMediaDataInRealTime = YES;

            if([self.assetWriter canAddInput:audioWriterInput])
            {
                [self.assetWriter addInput:audioWriterInput];
                [self.assetWriter startWriting];
                [self.assetWriter startSessionAtSourceTime:kCMTimeZero];

                /*
                 Create the asset reader to read the mp4 files on the disk
                 */
                AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:composition error:&error];
                NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

                /*
                 Add an output to be able to retrieve the video in the files
                 */
                AVAssetReaderTrackOutput *videoAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoAssetTrack outputSettings:videoOptions];
                videoAssetTrackOutput.alwaysCopiesSampleData = NO;

                if([assetReader canAddOutput:videoAssetTrackOutput])
                {
                    [assetReader addOutput:videoAssetTrackOutput];

                    /*
                     Add an output to be able to retrieve the audio in the files
                     */
                    AVAssetReaderTrackOutput *audioAssetTrackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:audioAssetTrack outputSettings:nil];
                    audioAssetTrackOutput.alwaysCopiesSampleData = NO;

                    if([assetReader canAddOutput:audioAssetTrackOutput])
                    {
                        [assetReader addOutput:audioAssetTrackOutput];

                        [assetReader startReading];

                        /*
                         Read the mp4 files until the end and copy them in the output file
                         */
                        dispatch_group_t encodingGroup = dispatch_group_create();

                        dispatch_group_enter(encodingGroup);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
                            while ([audioWriterInput isReadyForMoreMediaData])
                            {
                                CMSampleBufferRef nextSampleBuffer = [audioAssetTrackOutput copyNextSampleBuffer];

                                if (nextSampleBuffer)
                                {
                                    [audioWriterInput appendSampleBuffer:nextSampleBuffer];
                                    CFRelease(nextSampleBuffer);
                                }
                                else
                                {
                                    [audioWriterInput markAsFinished];
                                    dispatch_group_leave(encodingGroup);
                                    break;
                                }
                            }
                        }];

                        dispatch_group_enter(encodingGroup);
                        [videoWriterInput requestMediaDataWhenReadyOnQueue:self.encodingQueue usingBlock:^{
                            while ([videoWriterInput isReadyForMoreMediaData])
                            {
                                CMSampleBufferRef nextSampleBuffer = [videoAssetTrackOutput copyNextSampleBuffer];

                                if (nextSampleBuffer)
                                {
                                    [videoWriterInput appendSampleBuffer:nextSampleBuffer];
                                    CFRelease(nextSampleBuffer);
                                }
                                else
                                {
                                    [videoWriterInput markAsFinished];
                                    dispatch_group_leave(encodingGroup);
                                    break;
                                }
                            }
                        }];

                        dispatch_group_wait(encodingGroup, DISPATCH_TIME_FOREVER);

                    } else [self failWithError:error withCompletionHandler:handler];
                } else [self failWithError:error withCompletionHandler:handler];
            } else [self failWithError:error withCompletionHandler:handler];
        } else [self failWithError:error withCompletionHandler:handler];

        __weak Encoder *weakself = self;
        [self.assetWriter finishWritingWithCompletionHandler:^{
            self.status = EncoderStatusCompleted;
            handler();
            weakself.assetWriter = nil;
            self.encodingQueue = nil;
        }];
    }
    else [self failWithError:error withCompletionHandler:handler];
}

- (dispatch_queue_t)encodingQueue
{
    if(!_encodingQueue)
    {
        _encodingQueue = dispatch_queue_create("com.myProject.encoding", NULL);
    }
    return _encodingQueue;
}
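
For completeness, here is a hypothetical usage sketch (not part of the answer above). The AVComposition passed to -reencodeComposition:toMP4File:withCompletionHandler: can be assembled from the source mp4 URLs with AVMutableComposition's insertTimeRange:ofAsset:atTime:error:, exactly as in the export-session sketch in the question; encoder (an Encoder instance), composition, and outputURL are assumed names.

    // Assumed: composition is an AVMutableComposition built from the source mp4 files,
    // encoder is an Encoder instance, outputURL is where the joined mp4 should be written.
    [encoder reencodeComposition:composition toMP4File:outputURL withCompletionHandler:^{
        // The re-encoded, joined mp4 is now at outputURL, with every frame at a single resolution.
        NSLog(@"Joined mp4 written to %@", outputURL);
    }];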

This implementation worked for my project TS2MP4, but in the end I didn't need it.

Regarding ios - AVAssetReader/AVAssetWriter joining mp4 files of different resolutions, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/24390576/
