I'm trying to do a screen recording with AVAssetWriter, which also accepts an audio input. However, I keep hitting this error: after a few calls to appendSampleBuffer: (inside encodeAudioFrame:), the AVAssetWriter sometimes transitions to AVAssetWriterStatusFailed:

Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)}

A few observations:

Any help would be greatly appreciated.

Below is the code I'm using, with several sections omitted for brevity. I'm currently using the OSX 10.9 SDK, with ARC turned off.
- (BOOL) startRecording
{
    if (!isRecording)
    {
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            [self startCapture];
            [self setUpWriter];
            startedAt = [NSDate date];
            isRecording = YES;
            while (isRecording)
            {
                NSAutoreleasePool* pool = [NSAutoreleasePool new];
                NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
                CMTime time = CMTimeMakeWithSeconds(offset - pauseDelta, 1000);
                [self encodeFrameAtTime:time];
                [pool drain];
                usleep(50000); //sleep() takes whole seconds, so sleep(0.05f) would not sleep at all
            }
            [self endCapture];
            [self completeRecordingSession];
        });
    }
    return YES;
}
- (void) stopRecording
{
    isRecording = NO;
}
-(void) startCapture
{
    AVCaptureDevice* microphone = ...; //Device selection code omitted

    videoCaptureSession = [[AVCaptureSession alloc] init];
    videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;

    //------------------------------------------

    NSError* err = nil;
    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err];
    [videoCaptureSession addInput:audioInput];

    //------------------------------------------

    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    queue = dispatch_queue_create("videoQueue", NULL);
    [audioOutput setSampleBufferDelegate:self queue:queue];
    [videoCaptureSession addOutput:audioOutput];

    audioDelta = -1;
    [videoCaptureSession startRunning];
}
-(void) endCapture
{
    [videoCaptureSession stopRunning];
    [videoCaptureSession removeInput:audioInput];
    [videoCaptureSession removeOutput:audioOutput];
    [audioOutput release];
    audioOutput = nil;
    audioInput = nil;
    [videoCaptureSession release];
    videoCaptureSession = nil;
    dispatch_release(queue);
}
-(BOOL) setUpWriter
{
    //delete the file.
    {
        NSFileManager* fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:self.moviePath]) {
            NSError* error = nil;
            if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", self.moviePath);
            }
        }
    }

    mCaptureRect = NSRectToCGRect([screen frame]);
    int FWidth = mCaptureRect.size.width;
    int FHeight = mCaptureRect.size.height;
    int bitRate = FWidth * FHeight * 8;

    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:nil];
    NSParameterAssert(videoWriter);

    //Configure video
    NSDictionary* codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
                                   nil];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:FWidth], AVVideoWidthKey,
                                   [NSNumber numberWithInt:FHeight], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,
                                      [NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey,
                                      [NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey,
                                      nil];
    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    //Configure audio
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    //Add inputs
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}
- (void) cleanupWriter
{
    [videoWriter release];
    videoWriter = nil;
    avAdaptor = nil;
    videoWriterInput = nil;
    startedAt = nil;
    audioWriterInput = nil;
}
- (void) encodeFrameAtTime:(CMTime)timestamp
{
    if (!isRecording) return;
    if (videoWriter == nil) return;

    if (videoWriter.status == AVAssetWriterStatusFailed)
    {
        return;
    }

    if (videoWriter.status != AVAssetWriterStatusWriting)
    {
        if (videoWriter.status != AVAssetWriterStatusUnknown)
            return;
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:timestamp];
        startTime = CMTimeGetSeconds(timestamp);
    }

    timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000);
    [self writeVideoFrameAtTime:timestamp];
}
-(void) writeVideoFrameAtTime:(CMTime)time
{
    if (![videoWriterInput isReadyForMoreMediaData])
        return;

    /*
     CVPixelBufferRef manipulation omitted...
     */

    BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
    if (videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error);
    if (!success) NSLog(@"Warning: Unable to write buffer to video");

    CVPixelBufferRelease(pixelBuffer);
    CGImageRelease(cgImage);
}
-(void) encodeAudioFrame:(CMSampleBufferRef)buffer
{
    if (!isRecording) return;

    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer);

    if (videoWriter.status != AVAssetWriterStatusWriting)
    {
        //Wait for video thread to start the writer
        return;
    }

    if (![audioWriterInput isReadyForMoreMediaData])
        return;

    NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt];
    if (audioDelta == -1)
    {
        audioDelta = offset - CMTimeGetSeconds(timestamp);
    }

    //Adjusts the CMSampleBufferRef's timestamp to match the video stream's zero-based timestamp
    CMItemCount count;
    CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000);
    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count);
    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);
    for (CMItemCount i = 0; i < count; i++)
    {
        pInfo[i].decodeTimeStamp = newTimestamp;
        pInfo[i].presentationTimeStamp = newTimestamp;
    }

    CMSampleBufferRef newBuffer;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
    free(pInfo);

    timestamp = CMSampleBufferGetPresentationTimeStamp(newBuffer);
    BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];
    if (!res) NSLog(@"Warning: Unable to write buffer to audio: %@", videoWriter.error);
    CFRelease(newBuffer); //the copy is owned by us; not releasing it leaks every audio buffer
}
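One detail worth calling out in encodeAudioFrame: above: it overwrites every timing entry in the buffer with the same newTimestamp, which collapses the per-sample spacing inside each buffer. Purely as a point of comparison (this is my sketch, not part of the original post; copyRetimedBuffer: is a hypothetical helper name), here is a minimal retiming helper that shifts each entry by a constant offset instead, assuming the same audioDelta and pauseDelta ivars:

//Sketch: retime a sample buffer by shifting every timing entry by the same
//offset, preserving the relative spacing of the samples.
//The caller owns the returned buffer and must CFRelease it.
-(CMSampleBufferRef) copyRetimedBuffer:(CMSampleBufferRef)buffer
{
    CMItemCount count = 0;
    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, NULL, &count);

    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count);

    CMTime offset = CMTimeMakeWithSeconds(audioDelta - pauseDelta, 1000);
    for (CMItemCount i = 0; i < count; i++)
    {
        pInfo[i].presentationTimeStamp = CMTimeAdd(pInfo[i].presentationTimeStamp, offset);
        pInfo[i].decodeTimeStamp = kCMTimeInvalid; //let the writer derive decode timestamps
    }

    CMSampleBufferRef newBuffer = NULL;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer);
    free(pInfo);
    return newBuffer;
}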
- (void) completeRecordingSession
{
    @autoreleasepool {
        //Wait for the video thread to start the writer
        while (videoWriter.status == AVAssetWriterStatusUnknown)
        {
            NSLog(@"Waiting...");
            [NSThread sleepForTimeInterval:0.5f];
        }

        @synchronized(self)
        {
            [videoWriter finishWriting];
            [self cleanupWriter];
        }
    }
}
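As an aside, the synchronous finishWriting blocks the calling thread and is deprecated as of OS X 10.9 in favor of the asynchronous variant. A minimal sketch using the same ivars (my addition, not from the original post):

//Sketch: asynchronous teardown; the handler runs once the writer has finished.
[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusFailed)
        NSLog(@"Finish failed: %@", videoWriter.error);
    [self cleanupWriter];
}];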
-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer))
        return;

    @autoreleasepool {
        if (captureOutput == audioOutput)
        {
            if (isRecording && !isPaused)
            {
                [self encodeAudioFrame:sampleBuffer];
            }
        }
    }
}
Best Answer

I ran into exactly the same problem with my Swift code. It turned out my computer was simply running out of memory, so double-check that you have enough free memory.
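If you want to check that hypothesis programmatically, here is a rough sketch (my own addition, not from the answer) that reads the free physical memory from Mach, e.g. to refuse to start a recording below some threshold:

#import <mach/mach.h>

//Sketch: returns the amount of free physical memory in bytes, or 0 on failure.
static uint64_t FreeMemoryBytes(void)
{
    vm_size_t pageSize = 0;
    host_page_size(mach_host_self(), &pageSize);

    vm_statistics64_data_t stats;
    mach_msg_type_number_t count = HOST_VM_INFO64_COUNT;
    if (host_statistics64(mach_host_self(), HOST_VM_INFO64,
                          (host_info64_t)&stats, &count) != KERN_SUCCESS)
        return 0;

    return (uint64_t)stats.free_count * pageSize;
}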
Regarding "objective-c - AVAssetWriter becomes AVAssetWriterStatusFailed after appendSampleBuffer:", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/24199940/