
objective-c - AVAssetWriter becomes AVAssetWriterStatusFailed after appendSampleBuffer:


I am trying to do a screen recording with AVAssetWriter, which also accepts audio input. However, I am stuck on this error: after a few calls to appendSampleBuffer: (inside encodeAudioFrame:), the AVAssetWriter sometimes turns into AVAssetWriterStatusFailed with the following error:

Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)}
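For reference, the underlying OSStatus can be read back from the writer's error; -12737 appears to match kCMSampleBufferError_ArrayTooSmall in <CoreMedia/CMSampleBuffer.h>. A minimal sketch for pulling that code out (not part of the original code):

    // Sketch: extract the underlying OSStatus from the writer's NSError.
    NSError* underlying = [videoWriter.error.userInfo objectForKey:NSUnderlyingErrorKey];
    if (underlying != nil) {
        NSLog(@"Underlying OSStatus: %ld", (long)underlying.code);
    }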

A few observations:

  • Once the writer is in this state, subsequent recording attempts also return AVAssetWriterStatusFailed, even when I use a different writer object.
  • The error does not appear when I comment out the audio recording blocks.
  • The error still appears when I comment out the video recording blocks and leave the incoming CMSampleBufferRefs untouched.

Any help would be greatly appreciated.

Below is the code I am using, with several parts omitted for brevity. I am currently building against the OSX 10.9 SDK with ARC turned off.

- (BOOL) startRecording
{
    if (!isRecording)
    {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            [self startCapture];
            [self setUpWriter];

            startedAt = [NSDate date];
            isRecording = YES;

            while (isRecording)
            {
                NSAutoreleasePool* pool = [NSAutoreleasePool new];

                NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
                CMTime time = CMTimeMakeWithSeconds(offset - pauseDelta, 1000);

                [self encodeFrameAtTime:time];

                [pool drain];

                usleep(50000); // 50 ms; sleep() takes whole seconds, so sleep(0.05f) would not wait at all
            }

            [self endCapture];
            [self completeRecordingSession];
        });
    }

    return YES;
}

- (void) stopRecording {
    isRecording = NO;
}

-(void) startCapture
{
    AVCaptureDevice* microphone = x; // Device selection code omitted

    videoCaptureSession = [[AVCaptureSession alloc] init];
    videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;

    //------------------------------------------

    NSError* err = nil;
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];
    [videoCaptureSession addInput:audioInput];

    //------------------------------------------

    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    queue = dispatch_queue_create("videoQueue", NULL);
    [audioOutput setSampleBufferDelegate:self queue:queue];
    [videoCaptureSession addOutput:audioOutput];

    audioDelta = -1;
    [videoCaptureSession startRunning];
}


-(void) endCapture
{
    [videoCaptureSession stopRunning];

    [videoCaptureSession removeInput:audioInput];
    [videoCaptureSession removeOutput:audioOutput];

    [audioOutput release];
    audioOutput = nil;

    audioInput = nil;

    [videoCaptureSession release];
    videoCaptureSession = nil;

    dispatch_release(queue);
}

-(BOOL) setUpWriter
{
    //Delete any previous recording file.
    {
        NSFileManager* fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:self.moviePath]) {
            NSError* error = nil;
            if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", self.moviePath);
            }
        }
    }

    mCaptureRect = NSRectToCGRect([screen frame]);

    int FWidth = mCaptureRect.size.width;
    int FHeight = mCaptureRect.size.height;
    int bitRate = FWidth * FHeight * 8;

    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:nil];
    NSParameterAssert(videoWriter);

    //Configure video
    NSDictionary* codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
                                   nil];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:FWidth], AVVideoWidthKey,
                                   [NSNumber numberWithInt:FHeight], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                      [NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey,
                                      [NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey,
                                      nil];

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //Configure audio
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    //Add the inputs
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}
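
As a defensive variant (an assumption on my part, not the original flow), the two addInput: calls could be guarded with canAddInput:, which reports whether the writer will accept an input with those settings:

    // Sketch: guard the addInput: calls against rejected settings.
    if (![videoWriter canAddInput:videoWriterInput] || ![videoWriter canAddInput:audioWriterInput]) {
        NSLog(@"Writer rejected an input; check the output settings dictionaries");
        return NO;
    }
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];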

- (void) cleanupWriter {
    [videoWriter release];
    videoWriter = nil;
    avAdaptor = nil;
    videoWriterInput = nil;
    startedAt = nil;
    audioWriterInput = nil;
}

- (void) encodeFrameAtTime:(CMTime)timestamp
{
    if (!isRecording) return;
    if (videoWriter == nil) return;

    if (videoWriter.status == AVAssetWriterStatusFailed)
    {
        return;
    }

    if (videoWriter.status != AVAssetWriterStatusWriting)
    {
        if (videoWriter.status != AVAssetWriterStatusUnknown)
            return;

        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:timestamp];

        startTime = CMTimeGetSeconds(timestamp);
    }

    timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000);

    [self writeVideoFrameAtTime:timestamp];
}

-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData])
        return;

    /*
     CVPixelBufferRef manipulation omitted...
     */

    BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];

    if (videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error);
    if (!success) NSLog(@"Warning: Unable to write buffer to video");

    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(cgImage);
}
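
Since the CVPixelBufferRef creation is omitted above, here is one memory-friendlier sketch that rents buffers from the adaptor's pool instead of allocating a fresh one per frame (pixelBufferPool is only non-nil after startWriting has been called; the rendering step is left as a placeholder):

    // Sketch: draw into a recycled buffer from the adaptor's pool.
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn cvErr = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                        avAdaptor.pixelBufferPool,
                                                        &pixelBuffer);
    if (cvErr != kCVReturnSuccess || pixelBuffer == NULL) {
        NSLog(@"Could not obtain a pixel buffer from the pool (%d)", cvErr);
        return;
    }
    // ...render the screen contents into pixelBuffer here...
    [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
    CVPixelBufferRelease(pixelBuffer);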

-(void) encodeAudioFrame:(CMSampleBufferRef)buffer
{
    if (!isRecording) return;

    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);

    if (videoWriter.status != AVAssetWriterStatusWriting)
    {
        //Wait for the video thread to start the writer
        return;
    }

    if (![audioWriterInput isReadyForMoreMediaData])
        return;

    NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];

    if (audioDelta == -1)
    {
        audioDelta = offset - CMTimeGetSeconds(timestamp);
    }

    //Adjust the CMSampleBufferRef's timestamps to match the video stream's zero-based timeline
    CMItemCount count;
    CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000);

    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count);
    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);

    for (CMItemCount i = 0; i < count; i++)
    {
        pInfo[i].decodeTimeStamp = newTimestamp;
        pInfo[i].presentationTimeStamp = newTimestamp;
    }

    CMSampleBufferRef newBuffer;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
    free(pInfo);

    timestamp = CMSampleBufferGetPresentationTimeStamp(newBuffer);

    BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];
    if (!res) NSLog(@"Warning: Unable to write audio buffer");

    //newBuffer follows the Create rule, so release it to avoid leaking one copy per callback
    CFRelease(newBuffer);
}
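
One caveat with the retiming above: every timing entry gets the same newTimestamp, which collapses multi-sample audio buffers onto one instant. A sketch of an alternative that shifts each entry by a constant CMTime delta instead (same audioDelta/pauseDelta assumptions as above):

    // Sketch: shift all timing entries by one CMTime delta rather than
    // overwriting them with a single value.
    CMTime delta = CMTimeMakeWithSeconds(audioDelta - pauseDelta, 1000);
    CMItemCount n = 0;
    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, NULL, &n);
    CMSampleTimingInfo* timing = malloc(sizeof(CMSampleTimingInfo) * n);
    CMSampleBufferGetSampleTimingInfoArray(buffer, n, timing, &n);
    for (CMItemCount i = 0; i < n; i++)
    {
        timing[i].presentationTimeStamp = CMTimeAdd(timing[i].presentationTimeStamp, delta);
        if (CMTIME_IS_VALID(timing[i].decodeTimeStamp))
            timing[i].decodeTimeStamp = CMTimeAdd(timing[i].decodeTimeStamp, delta);
    }
    CMSampleBufferRef shifted = NULL;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, n, timing, &shifted);
    free(timing);
    // ...append shifted, then CFRelease(shifted)...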

- (void) completeRecordingSession {
    @autoreleasepool {
        //Wait until the writer has actually started before finishing
        while (videoWriter.status == AVAssetWriterStatusUnknown)
        {
            NSLog(@"Waiting...");
            [NSThread sleepForTimeInterval:0.5f];
        }

        @synchronized(self)
        {
            [videoWriter finishWriting];
            [self cleanupWriter];
        }
    }
}
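
On the OSX 10.9 SDK the synchronous finishWriting is already deprecated; the asynchronous equivalent would look roughly like this (a sketch, keeping the cleanupWriter call from above):

    // Sketch: asynchronous finish (available since OS X 10.9 / iOS 6).
    [videoWriter finishWritingWithCompletionHandler:^{
        if (videoWriter.status == AVAssetWriterStatusFailed) {
            NSLog(@"Finish failed: %@", videoWriter.error);
        }
        [self cleanupWriter];
    }];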

-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer))
        return;

    @autoreleasepool {
        if (captureOutput == audioOutput)
        {
            if (isRecording && !isPaused)
            {
                [self encodeAudioFrame:sampleBuffer];
            }
        }
    }
}

Best Answer

I ran into exactly the same problem with my Swift code, and it turned out my machine was running out of memory. So double-check that you have enough free memory available while recording.
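
A quick way to verify this is to log the process's resident memory around the append calls, for example via Mach's task_info (a minimal sketch, not part of the original answer):

    #include <mach/mach.h>

    // Sketch: log the current resident memory of the process.
    static void LogResidentMemory(void)
    {
        mach_task_basic_info_data_t info;
        mach_msg_type_number_t count = MACH_TASK_BASIC_INFO_COUNT;
        if (task_info(mach_task_self(), MACH_TASK_BASIC_INFO,
                      (task_info_t)&info, &count) == KERN_SUCCESS) {
            NSLog(@"Resident memory: %.1f MB", info.resident_size / (1024.0 * 1024.0));
        }
    }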

For the original thread on objective-c - AVAssetWriter becomes AVAssetWriterStatusFailed after appendSampleBuffer:, see the similar question on Stack Overflow: https://stackoverflow.com/questions/24199940/
