I'm trying to capture a movie using AVAssetWriter. On the iPhone 5 everything works fine; capturing and saving the movie works like a charm.
But when I try to capture a movie on the iPhone 4, the sample buffers skip some frames and the movie doesn't come out well.
So, here is my code:
- (void) initCaptureSession{
// open the session (note: this checks the 640x480 preset but then sets AVCaptureSessionPresetHigh)
session = [[AVCaptureSession alloc] init];
if([session canSetSessionPreset:AVCaptureSessionPreset640x480]) session.sessionPreset = AVCaptureSessionPresetHigh;
// get devices for audio and video
deviceVideo = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
deviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
// create input of audio and video
inputVideo = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
if (!inputVideo) NSLog(@"ERROR: trying to open camera: %@", error);
inputAudio = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
if (!inputAudio) NSLog(@"ERROR: trying to open audio: %@", error);
// CMTime maxDuration = CMTimeMake(60, 1);
// create output audio and video
outputVideo = [[AVCaptureVideoDataOutput alloc] init];
outputVideo.alwaysDiscardsLateVideoFrames = NO;
outputVideo.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
outputAudio = [[AVCaptureAudioDataOutput alloc] init];
// add inputs and outputs in the current session
[session beginConfiguration];
if ([session canAddInput:inputVideo])[session addInput:inputVideo];
if ([session canAddInput:inputAudio])[session addInput:inputAudio];
if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
[session commitConfiguration];
// turn off the torch
[deviceVideo lockForConfiguration:&error];
if([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff]) [deviceVideo setTorchMode:AVCaptureTorchModeOff];
[deviceVideo unlockForConfiguration];
[self configDevice];
// create the preview view to show the video
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *viewLayer = viewPreview.layer;
[viewLayer setMasksToBounds:YES];
[captureVideoPreviewLayer setFrame:[viewLayer bounds]];
[viewLayer addSublayer:captureVideoPreviewLayer];
// dispatch outputs to delegate in a queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[outputVideo setSampleBufferDelegate:self queue:queue];
[outputAudio setSampleBufferDelegate:self queue:queue];
// dispatch_release(queue);
[session startRunning];
}
-(BOOL) setupWriter{
urlOutput = [self tempFileURL];
NSError *error = nil;
videoWriter = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(videoWriter);
// Add metadata
NSArray *existingMetadataArray = videoWriter.metadata;
NSMutableArray *newMetadataArray = nil;
if (existingMetadataArray) {
newMetadataArray = [existingMetadataArray mutableCopy];
} else {
newMetadataArray = [[NSMutableArray alloc] init];
}
AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
mutableItemLocation.key = AVMetadataCommonKeyLocation;
mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];
AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
mutableItemModel.key = AVMetadataCommonKeyModel;
mutableItemModel.value = [[UIDevice currentDevice] model];
[newMetadataArray addObject:mutableItemLocation];
[newMetadataArray addObject:mutableItemModel];
videoWriter.metadata = newMetadataArray;
// video Configuration
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:360], AVVideoHeightKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary* audioOutputSettings = nil;
// Both types of audio settings caused the output video file to be corrupted.
// if( NO ) {
// should work from the iPhone 3GS on, and from the 3rd-generation iPod
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
// } else {
// // should work on any device, but requires more space
// audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
// [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
// [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
// [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
// [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
// [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// nil ];
// }
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: audioOutputSettings];
audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
return YES;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if( !CMSampleBufferDataIsReady(sampleBuffer) ){
NSLog( @"sample buffer is not ready. Skipping sample" );
return;
}
if(isRecording == YES ){
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if(videoWriter.status != AVAssetWriterStatusWriting ){
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:lastSampleTime];
}
if( captureOutput == outputVideo ){
[self newVideoSample:sampleBuffer];
} else if( captureOutput == outputAudio) {
[self newAudioSample:sampleBuffer];
}
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(@"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", videoWriter.error);
return;
}
while (!videoWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
if( isRecording ){
if( videoWriter.status > AVAssetWriterStatusWriting ) {
NSLog(@"Warning: writer status is %d", videoWriter.status);
if( videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", videoWriter.error);
return;
}
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if( ![audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to audio input");
}
}
-(void) startVideoRecording {
if( !isRecording ){
NSLog(@"start video recording...");
if( ![self setupWriter] ) {
NSLog(@"Setup Writer Failed") ;
return;
}
isRecording = YES;
recorded = NO;
}
}
-(void) stopVideoRecording {
if( isRecording ) {
isRecording = NO;
btRecord.hidden = NO;
btRecording.hidden = YES;
[timerToRecord invalidate];
timerToRecord = nil;
// [session stopRunning];
[videoWriter finishWritingWithCompletionHandler:^{
if (videoWriter.status == AVAssetWriterStatusCompleted) {
videoWriterInput = nil;
audioWriterInput = nil;
videoWriter = nil;
NSLog(@"finishWriting returned successful");
recorded = YES;
} else {
NSLog(@"finishWriting returned unsuccessful");
}
}];
NSLog(@"video recording stopped");
[self performSelector:@selector(openPlayer) withObject:nil afterDelay:0.5];
}
}
When I remove these lines:
while (!audioWriterInput.readyForMoreMediaData) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
I get this error:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] A sample buffer cannot be appended when readyForMoreMediaData is NO.'
On the iPhone 5 I don't need this loop.
I've read some examples here, but I don't understand how to make the movie smoother on the iPhone 4.
If anyone has suggestions or a complete example of how to make movies with AVAssetWriter for the iPhone 3GS, iPhone 4, iPhone 4S and iPhone 5, I would be very grateful.
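For completeness, the AVAssetWriterInput documentation also describes a pull model (intended for non-real-time writing) as an alternative to busy-waiting on readyForMoreMediaData; here is a minimal sketch of it (writerQueue and the nextSampleBuffer helper are placeholder names, not code from my project):

// Minimal sketch of the documented pull model, assuming samples can be
// fetched on demand. "writerQueue" and "nextSampleBuffer" are placeholders.
dispatch_queue_t writerQueue = dispatch_queue_create("WriterQueue", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:writerQueue usingBlock:^{
    while (videoWriterInput.readyForMoreMediaData) {
        CMSampleBufferRef buffer = [self nextSampleBuffer]; // placeholder buffer source
        if (!buffer) {
            [videoWriterInput markAsFinished]; // no more media data
            break;
        }
        [videoWriterInput appendSampleBuffer:buffer];
        CFRelease(buffer);
    }
}];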
Thanks.
Best answer
After a week of fighting with AVFoundation, I found a good solution.
After watching WWDC 2012 session 520, I put together an approach that works well.
First, I record the movie with AVCaptureMovieFileOutput and the session preset AVCaptureSessionPreset640x480.
After recording, the user chooses whether to save and share, and the movie is simply kept or deleted accordingly.
If the user wants to save (or save and share), I re-encode and compress the movie separately:
I compress the video track first, then the audio track.
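The recording step itself is not part of the code below; here is a minimal sketch of that part, assuming a capture session set up as in the question (recordOutput and movieURL are placeholder names):

// Minimal sketch of recording with AVCaptureMovieFileOutput.
// "recordOutput" and "movieURL" are placeholder names, and self must
// conform to AVCaptureFileOutputRecordingDelegate.
AVCaptureMovieFileOutput *recordOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
    session.sessionPreset = AVCaptureSessionPreset640x480;
if ([session canAddOutput:recordOutput]) [session addOutput:recordOutput];
[recordOutput startRecordingToOutputFileURL:movieURL recordingDelegate:self];
// later: [recordOutput stopRecording];
// the finished file arrives in -captureOutput:didFinishRecordingToOutputFileURL:fromConnections:error: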
Here is my code:
-(void)exportMediaWithURL:(NSURL *)url location:(CLLocationCoordinate2D)location mirror:(BOOL)mirror{
urlMedia = url;
locationMedia = location;
videoRecorded = NO;
audioRecorded = NO;
asset = [AVAsset assetWithURL:urlMedia];
progressVideo = 0.0;
progressAudio = 0.0;
progressMarge = 0.0;
progressFactor = 3.0;
mirrored = mirror;
limitTime = CMTimeMake(1000*60, 1000);
[asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^() {
NSError *error;
AVKeyValueStatus stats = [asset statusOfValueForKey:@"tracks" error:&error];
if(stats == AVKeyValueStatusLoaded){
if([[asset tracksWithMediaType:AVMediaTypeVideo] count] > 0) video_track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
if([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0) audio_track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
if(!audio_track) progressFactor = 1.0;
if(video_track){
if (CMTimeCompare(asset.duration, limitTime) > 0) {
totalTime = limitTime;
}else{
totalTime = asset.duration;
}
[self exportVideo];
}
}
}];
}
-(void)exportVideo{
NSError *error;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:video_track outputSettings:videoSettings];
[assetReader addOutput:videoOutput];
assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
// start session to make a movie
if (assetVideoWriter.status == AVAssetWriterStatusUnknown) {
if ([self setupWriterVideo]) {
if ([assetVideoWriter startWriting]) {
[assetVideoWriter startSessionAtSourceTime:kCMTimeZero];
}
}
}
if([assetReader startReading]){
BOOL videoDone = NO;
CMSampleBufferRef bufferVideo = NULL;
while (!videoDone) {
bufferVideo = NULL; // reset each pass so a released buffer is never reused
if ([assetReader status] == AVAssetReaderStatusReading) bufferVideo = [videoOutput copyNextSampleBuffer];
if(bufferVideo){
[self newVideoSample:bufferVideo];
CFRelease(bufferVideo);
}else{
videoDone = YES;
}
}
// finish
[videoWriterInput markAsFinished];
[assetVideoWriter finishWritingWithCompletionHandler:^{}];
// workaround ("gambiarra"): busy-wait so this object is not deallocated while the completion block is still pending
while (!videoRecorded) {
if (assetVideoWriter.status == AVAssetWriterStatusCompleted) {
videoWriterInput = nil;
assetVideoWriter = nil;
videoRecorded = YES;
if (audio_track) {
[self exportAudio];
}else{
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
[infoToSend setValue:urlOutputVideo forKey:@"url_media"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
}
}
}
}
}
-(void)exportAudio{
NSError *error;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
NSDictionary* audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey, nil];
AVAssetReaderOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audio_track outputSettings:audioSettings];
[assetReader addOutput:audioOutput];
assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
// start session to make a movie
if (assetAudioWriter.status == AVAssetWriterStatusUnknown) {
if ([self setupWriterAudio]) {
if ([assetAudioWriter startWriting]) {
[assetAudioWriter startSessionAtSourceTime:kCMTimeZero];
}
}
}
if([assetReader startReading]){
BOOL audioDone = NO;
CMSampleBufferRef bufferAudio = NULL;
while (!audioDone) {
bufferAudio = NULL; // reset each pass so a released buffer is never reused
if ([assetReader status] == AVAssetReaderStatusReading) bufferAudio = [audioOutput copyNextSampleBuffer];
if(bufferAudio){
[self newAudioSample:bufferAudio];
CFRelease(bufferAudio);
}else{
audioDone = YES;
}
}
// finish
[audioWriterInput markAsFinished];
[assetAudioWriter finishWritingWithCompletionHandler:^{}];
// workaround ("gambiarra"): busy-wait so this object is not deallocated while the completion block is still pending
while (!audioRecorded) {
if (assetAudioWriter.status == AVAssetWriterStatusCompleted) {
audioWriterInput = nil;
assetAudioWriter = nil;
audioRecorded = YES;
[self margeFile];
}
}
}
}
-(void)margeFile{
AVURLAsset *assetVideo = [AVURLAsset assetWithURL:urlOutputVideo];
AVAssetTrack *video_track_marge = [[assetVideo tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVURLAsset *assetAudio = [AVURLAsset assetWithURL:urlOutputAudio];
AVAssetTrack *audio_track_marge = [[assetAudio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
CMTime startTime = CMTimeMake(1, 1);
CMTimeRange timeRangeVideo = CMTimeRangeMake(kCMTimeZero, assetVideo.duration);
CMTimeRange timeRangeAudio = CMTimeRangeMake(kCMTimeZero, assetAudio.duration);
AVMutableComposition * composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
if(mirrored) compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI);
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error;
[compositionVideoTrack insertTimeRange:timeRangeVideo ofTrack:video_track_marge atTime:startTime error:&error];
[compositionAudioTrack insertTimeRange:timeRangeAudio ofTrack:audio_track_marge atTime:startTime error:&error];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetPassthrough];
exportSession.outputFileType = AVFileTypeAppleM4V;
exportSession.outputURL = [self tempFileURL:media_mixed];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.metadata = newMetadataArray;
exportSession.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), totalTime);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
switch (exportSession.status) {
case AVAssetExportSessionStatusCompleted:
[infoToSend setValue:exportSession.outputURL forKey:@"url_media"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_DONE object:self userInfo:infoToSend];
break;
case AVAssetExportSessionStatusExporting:
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self];
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"failed");
break;
}
}];
while (exportSession.status == AVAssetExportSessionStatusExporting) {
progressMarge = exportSession.progress;
[self postProgress];
}
}
-(BOOL) setupWriterVideo{
urlOutputVideo = [self tempFileURL:media_video];
NSError *error = nil;
assetVideoWriter = [[AVAssetWriter alloc] initWithURL:urlOutputVideo fileType:AVFileTypeMPEG4 error:&error];
NSParameterAssert(assetVideoWriter);
// Add metadata
NSArray *existingMetadataArray = assetVideoWriter.metadata;
if (existingMetadataArray) {
newMetadataArray = [existingMetadataArray mutableCopy];
} else {
newMetadataArray = [[NSMutableArray alloc] init];
}
AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
mutableItemLocation.key = AVMetadataCommonKeyLocation;
mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", locationMedia.latitude, locationMedia.longitude];
AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
mutableItemModel.key = AVMetadataCommonKeyModel;
mutableItemModel.value = [[UIDevice currentDevice] model];
[newMetadataArray addObject:mutableItemLocation];
[newMetadataArray addObject:mutableItemModel];
assetVideoWriter.metadata = newMetadataArray;
assetVideoWriter.shouldOptimizeForNetworkUse = YES;
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoConfiguration]];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = NO;
// add input
[assetVideoWriter addInput:videoWriterInput];
return YES;
}
-(BOOL) setupWriterAudio{
urlOutputAudio = [self tempFileURL:media_audio];
NSError *error = nil;
assetAudioWriter = [[AVAssetWriter alloc] initWithURL:urlOutputAudio fileType:AVFileTypeAppleM4A error:&error];
NSParameterAssert(assetAudioWriter);
audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioConfiguration]];
audioWriterInput.expectsMediaDataInRealTime = NO;
// add input
[assetAudioWriter addInput:audioWriterInput];
return YES;
}
- (NSDictionary *)videoConfiguration{
// video Configuration
// float bitsPerPixel;
// int numPixels = 640.0 * 360.0;
// int bitsPerSecond;
//
// // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
// if ( numPixels < (640 * 360.0) )
// bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
// else
// bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
//
// bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
[NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
[NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
nil];
NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioHorizontalSpacingKey,
[NSNumber numberWithInt:1],AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
[NSNumber numberWithInt:90],AVVideoMaxKeyFrameIntervalKey,
videoCleanApertureSettings, AVVideoCleanApertureKey,
videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
nil];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
codecSettings,AVVideoCompressionPropertiesKey,
[NSNumber numberWithInt:640], AVVideoWidthKey,
[NSNumber numberWithInt:360], AVVideoHeightKey,
nil];
return videoSettings;
}
-(NSDictionary *)audioConfiguration{
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary* audioOutputSettings = nil;
// Both types of audio settings caused the output video file to be corrupted.
// if( NO ) {
// should work from the iPhone 3GS on, and from the 3rd-generation iPod
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 128000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
// } else {
// // should work on any device, but requires more space
// audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
// [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
// [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
// [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
// [ NSNumber numberWithInt: 2 ], AVNumberOfChannelsKey,
// [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
// nil ];
// }
return audioOutputSettings;
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer{
if( assetVideoWriter.status > AVAssetWriterStatusWriting ) {
if( assetVideoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", assetVideoWriter.error);
return;
}
if (assetVideoWriter.status == AVAssetWriterStatusWriting ) {
while (!videoWriterInput.readyForMoreMediaData) NSLog(@"waiting for video input");
if (videoWriterInput.readyForMoreMediaData) {
CMTime presTime = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
// CMTimeGetSeconds avoids the truncating integer division of value/timescale
progressVideo = CMTimeGetSeconds(presTime) / CMTimeGetSeconds(totalTime);
[self postProgress];
if (![videoWriterInput appendSampleBuffer:sampleBuffer]) NSLog(@"Unable to write to video input");
}
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer{
if( assetAudioWriter.status > AVAssetWriterStatusWriting ) {
if( assetAudioWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", assetAudioWriter.error);
return;
}
if (assetAudioWriter.status == AVAssetWriterStatusWriting ) {
while (!audioWriterInput.readyForMoreMediaData) NSLog(@"waiting for audio input");
if (audioWriterInput.readyForMoreMediaData) {
CMTime presTime = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
// CMTimeGetSeconds avoids the truncating integer division of value/timescale
progressAudio = CMTimeGetSeconds(presTime) / CMTimeGetSeconds(totalTime);
[self postProgress];
if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
NSLog(@"Unable to write to audio input");
}
}
}
}
- (void)postProgress{
float totalProgress = (progressVideo + progressAudio + progressMarge) / progressFactor;
NSMutableDictionary *infoToSend = [NSMutableDictionary new];
[infoToSend setValue:[NSNumber numberWithFloat:totalProgress] forKey:@"progress"];
[[NSNotificationCenter defaultCenter] postNotificationName:EXPORT_STATUS_EXPORTING object:self userInfo:infoToSend];
}
- (NSURL *)tempFileURL:(int)typeMedia {
NSString *outputPath;
switch (typeMedia) {
case media_video:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.mp4"];
break;
case media_audio:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.m4a"];
break;
case media_mixed:
outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"mixed.mp4"];
break;
}
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) [fileManager removeItemAtPath:outputPath error:nil];
return outputURL;
}
- (void) dealloc {
NSLog(@"dealloc video exporter");
[[NSNotificationCenter defaultCenter] removeObserver:self];
assetVideoWriter = nil;
assetAudioWriter = nil;
videoWriterInput = nil;
audioWriterInput = nil;
urlMedia = nil;
urlOutputVideo = nil;
urlOutputAudio = nil;
urlOutputFinal = nil;
}
@end
If anyone has something to add, please post it here!
Regarding "ios - capturing a movie with AVAssetWriter skips frames on the iPhone 4", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/15603508/