
iphone - Writing video and audio separately but simultaneously with AVCapture


I want to capture video and audio separately, but at the same time, so that in the end my NSDocuments folder contains an .mp4 file and a .wav (or a similar audio format). This is what I have so far, but it never even calls - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection.

Any idea what is going wrong here?

#import "VideoCaptureViewController.h"
#import <AssetsLibrary/AssetsLibrary.h>
#import "SMFileManager.h"



@interface VideoCaptureViewController () {
    AVCaptureSession *session_;
    AVCaptureDevice *captureDevice_;
    AVCaptureDeviceInput *deviceInput_;
    AVCaptureMovieFileOutput *captureMovieFileOutput_;
    BOOL recording_;

    AVCaptureAudioDataOutput *audioDataOutput_;

    NSString *filePathCapturedVideo_;
}

- (void) toggleRecording;
- (void) endRecording;

@end

@implementation VideoCaptureViewController
@synthesize delegate;
@synthesize previewLayer;

- (void)viewDidLoad
{
    [super viewDidLoad];

    session_ = [[AVCaptureSession alloc] init];

    // Add video input.
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    // AVCaptureDevice *captureDevice = nil;
    captureDevice_ = nil;
    for (AVCaptureDevice *device in videoDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            captureDevice_ = device;
            break;
        }
    }

    // AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (captureDevice_) {
        NSError *error;
        deviceInput_ = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice_ error:&error];
        if (!error) {
            if ([session_ canAddInput:deviceInput_])
                [session_ addInput:deviceInput_];
            else
                NSLog(@"Couldn't add video input");
        } else {
            NSLog(@"Couldn't create video input");
        }
    } else {
        NSLog(@"Couldn't create video capture device");
    }

    // Add audio input.
    NSLog(@"Adding audio input");
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];

    if (audioInput) {
        [session_ addInput:audioInput];
    }

    //----- ADD OUTPUTS -----

    // Audio data output.
    audioDataOutput_ = [[AVCaptureAudioDataOutput alloc] init];
    [session_ addOutput:audioDataOutput_];

    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [audioDataOutput_ setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Add video preview layer.
    NSLog(@"Adding video preview layer");
    [self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:session_] autorelease]];
    previewLayer.orientation = AVCaptureVideoOrientationPortrait; // Set orientation. You can deliberately set this wrong to flip the image, and may actually need to set it wrong to get the right image.
    [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    // Add movie file output.
    NSLog(@"Adding movie file output");
    captureMovieFileOutput_ = [[AVCaptureMovieFileOutput alloc] init];

    // Float64 TotalSeconds = 60;                                // Total seconds
    // int32_t preferredTimeScale = CAPTURE_FRAMES_PER_SECOND;   // Frames per second
    // CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); // Set max duration
    // captureMovieFileOutput_.maxRecordedDuration = maxDuration;

    captureMovieFileOutput_.minFreeDiskSpaceLimit = 1024 * 1024; // Minimum free space in bytes for recording to continue on a volume.

    if ([session_ canAddOutput:captureMovieFileOutput_])
        [session_ addOutput:captureMovieFileOutput_];

    // Set the connection properties (output properties).
    [self CameraSetOutputProperties]; // (We call a method as this also has to be done after changing camera.)

    //----- SET THE IMAGE QUALITY / RESOLUTION -----
    // Options:
    //   AVCaptureSessionPresetHigh     - Highest recording quality (varies per device)
    //   AVCaptureSessionPresetMedium   - Suitable for WiFi sharing (actual values may change)
    //   AVCaptureSessionPresetLow      - Suitable for 3G sharing (actual values may change)
    //   AVCaptureSessionPreset640x480  - 640x480 VGA (check it's supported before setting it)
    //   AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
    //   AVCaptureSessionPresetPhoto    - Full photo resolution (not supported for video output)
    NSLog(@"Setting image quality");
    [session_ setSessionPreset:AVCaptureSessionPresetHigh];
    if ([session_ canSetSessionPreset:AVCaptureSessionPreset1280x720]) { // Check size-based configs are supported before setting them.
        NSLog(@"1280x720 confirmed!");
        [session_ setSessionPreset:AVCaptureSessionPreset1280x720];
    }

    //----- DISPLAY THE PREVIEW LAYER -----
    // Display it full screen under our view controller's existing controls.
    NSLog(@"Display the preview layer");
    CGRect layerRect = [[[self view] layer] bounds];
    [previewLayer setBounds:layerRect];
    [previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))];
    // [[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
    // We use this instead so it goes on a layer behind our UI controls (avoids having to manually bring each control to the front):
    UIView *CameraView = [[[UIView alloc] init] autorelease];
    [[self view] addSubview:CameraView];
    [self.view sendSubviewToBack:CameraView];

    [[CameraView layer] addSublayer:previewLayer];

    //----- START THE CAPTURE SESSION RUNNING -----
    [session_ startRunning];
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    return (interfaceOrientation == UIInterfaceOrientationPortrait);
}


//********** VIEW WILL APPEAR **********
// View about to be added to the window (called each time it appears).
// Occurs after the other view's viewWillDisappear.
- (void)viewWillAppear:(BOOL)animated
{
    NSLog(@"%s", __func__);
    [super viewWillAppear:animated];
    recording_ = NO;
}

- (void)viewDidAppear:(BOOL)animated
{
    NSLog(@"%s", __func__);
    [super viewDidAppear:animated];

    UIButton *startStopButton = [UIButton buttonWithType:UIButtonTypeCustom];
    startStopButton.frame = CGRectMake(self.view.center.x, self.view.frame.size.height - 80, 100, 50);
    [startStopButton setTitle:@"record" forState:UIControlStateNormal];
    [startStopButton addTarget:self action:@selector(toggleRecording) forControlEvents:UIControlEventTouchUpInside];
    startStopButton.enabled = YES;
    startStopButton.backgroundColor = [UIColor grayColor];
    [self.view addSubview:startStopButton];

    // [NSTimer timerWithTimeInterval:5 target:self selector:@selector(toggleRecording) userInfo:nil repeats:NO];
    // [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(endRecording) userInfo:nil repeats:NO];
}

- (void)endRecording
{
    [self dismissModalViewControllerAnimated:YES];
}



//********** CAMERA SET OUTPUT PROPERTIES **********
- (void)CameraSetOutputProperties
{
    // Set the connection properties (output properties).
    AVCaptureConnection *CaptureConnection = [captureMovieFileOutput_ connectionWithMediaType:AVMediaTypeVideo];

    // Set landscape (if required).
    if ([CaptureConnection isVideoOrientationSupported]) {
        // [CaptureConnection setVideoOrientation:[UIDevice currentDevice].orientation];
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
        [CaptureConnection setVideoOrientation:orientation];
    }

    // Set frame rate (if required).
    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);

    if (CaptureConnection.supportsVideoMinFrameDuration)
        CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (CaptureConnection.supportsVideoMaxFrameDuration)
        CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);

    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}

//********** GET CAMERA IN SPECIFIED POSITION IF IT EXISTS **********
- (AVCaptureDevice *)CameraWithPosition:(AVCaptureDevicePosition)Position
{
    NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *Device in Devices)
    {
        if ([Device position] == Position)
        {
            return Device;
        }
    }
    return nil;
}



//********** CAMERA TOGGLE **********
- (IBAction)CameraToggleButtonPressed:(id)sender
{
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1) // Only do this if the device has multiple cameras.
    {
        NSLog(@"Toggle camera");
        NSError *error;
        //AVCaptureDeviceInput *videoInput = [self videoInput];
        AVCaptureDeviceInput *NewVideoInput = nil;
        AVCaptureDevicePosition position = [[deviceInput_ device] position];
        if (position == AVCaptureDevicePositionBack)
        {
            NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionFront] error:&error];
        }
        else if (position == AVCaptureDevicePositionFront)
        {
            NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionBack] error:&error];
        }

        if (NewVideoInput != nil)
        {
            [session_ beginConfiguration]; // We can now change the input and output configuration. Use commitConfiguration to end.
            [session_ removeInput:deviceInput_];
            if ([session_ canAddInput:NewVideoInput])
            {
                [session_ addInput:NewVideoInput];
                deviceInput_ = NewVideoInput;
            }
            else
            {
                [session_ addInput:deviceInput_];
            }

            // Set the connection properties again.
            [self CameraSetOutputProperties];

            [session_ commitConfiguration];
            [NewVideoInput release];
        }
    }
}




- (void)toggleRecording
{
    NSLog(@"%s", __func__);
    if (!recording_) {
        NSLog(@"START RECORDING");
        recording_ = YES;

        NSString *documentsDirectory = [SMFileManager applicationDocumentsDirectory];
        filePathCapturedVideo_ = [documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"];
        NSLog(@"storing file at %@", filePathCapturedVideo_);
        NSURL *url = [[NSURL alloc] initFileURLWithPath:filePathCapturedVideo_];
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:filePathCapturedVideo_]) {
            NSError *error;
            if ([fileManager removeItemAtPath:filePathCapturedVideo_ error:&error] == NO) {
                // Error - handle if required.
            }
        }
        // Start recording.
        [captureMovieFileOutput_ startRecordingToOutputFileURL:url recordingDelegate:self];
    } else {
        NSLog(@"STOP RECORDING");
        recording_ = NO;
        [captureMovieFileOutput_ stopRecording];
    }
}


- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"%s", __func__);
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"%@", outputFileURL);
    NSLog(@"%@", captureOutput);

    NSString *documentsDirectory = [SMFileManager applicationDocumentsDirectory];
    filePathCapturedVideo_ = [documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"];
    [self.delegate videoCaptured:filePathCapturedVideo_];
}


@end

Best Answer

I ran into a similar problem and concluded that it is not possible to use both delegates at the same time: on iOS, an AVCaptureMovieFileOutput and the sample-buffer data outputs (AVCaptureAudioDataOutput / AVCaptureVideoDataOutput) do not work together in the same capture session, which is why captureOutput:didOutputSampleBuffer:fromConnection: never gets called while the movie file output is attached. The best solution is to drop the AVCaptureMovieFileOutput entirely and save the files yourself, using only didOutputSampleBuffer together with AVAssetWriter.
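The original answer gives no code, so here is a minimal sketch of the audio half of that approach, assuming an AVCaptureAudioDataOutput whose sample-buffer delegate feeds an AVAssetWriter. The class name AudioFileRecorder and the methods setUpAudioWriterForURL: / stopWriting are made up for illustration. It writes mono AAC into an .m4a (AVFileTypeAppleM4A), a well-supported AVAssetWriter file type; a .wav should be achievable the same way with AVFileTypeWAVE and linear-PCM output settings. Error handling is trimmed and memory management follows the question's pre-ARC style.

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper (name made up): owns one AVAssetWriter that saves the
// microphone samples to their own file, independently of the video frames.
@interface AudioFileRecorder : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate> {
    AVAssetWriter *audioWriter_;
    AVAssetWriterInput *audioWriterInput_;
    BOOL sessionStarted_;
}
- (BOOL)setUpAudioWriterForURL:(NSURL *)url;
- (void)stopWriting;
@end

@implementation AudioFileRecorder

- (BOOL)setUpAudioWriterForURL:(NSURL *)url
{
    NSError *error = nil;
    audioWriter_ = [[AVAssetWriter alloc] initWithURL:url
                                             fileType:AVFileTypeAppleM4A
                                                error:&error];
    if (!audioWriter_) return NO;

    // Mono AAC at 44.1 kHz; for a real .wav, try AVFileTypeWAVE plus
    // kAudioFormatLinearPCM settings instead (untested assumption here).
    AudioChannelLayout layout;
    memset(&layout, 0, sizeof(layout));
    layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *settings = @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey       : @44100.0,
        AVNumberOfChannelsKey : @1,
        AVChannelLayoutKey    : [NSData dataWithBytes:&layout length:sizeof(layout)],
        AVEncoderBitRateKey   : @64000
    };
    audioWriterInput_ = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                            outputSettings:settings] retain];
    audioWriterInput_.expectsMediaDataInRealTime = YES; // live capture: never stall the queue
    [audioWriter_ addInput:audioWriterInput_];
    return [audioWriter_ startWriting];
}

// Delivered on the queue given to -setSampleBufferDelegate:queue:.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (audioWriter_.status != AVAssetWriterStatusWriting) return;

    // Anchor the writer's timeline to the first buffer's timestamp.
    if (!sessionStarted_) {
        sessionStarted_ = YES;
        [audioWriter_ startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    if (audioWriterInput_.readyForMoreMediaData) {
        [audioWriterInput_ appendSampleBuffer:sampleBuffer];
    }
}

- (void)stopWriting
{
    [audioWriterInput_ markAsFinished];
    [audioWriter_ finishWriting]; // synchronous; fine for the iOS 5/6 era this code targets
}

- (void)dealloc
{
    [audioWriterInput_ release];
    [audioWriter_ release];
    [super dealloc];
}

@end

The video side works the same way: add an AVCaptureVideoDataOutput to the session, give it its own AVAssetWriter (AVFileTypeMPEG4, AVMediaTypeVideo), and you end up with the separate .mp4 and audio files the question asks for. In the question's viewDidLoad that means removing captureMovieFileOutput_ and pointing [audioDataOutput_ setSampleBufferDelegate:... queue:...] at an instance of a recorder like the one above.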

For "iphone - Writing video and audio separately but simultaneously with AVCapture", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/12875763/
