I want to capture video and audio separately but at the same time, so that in the end I have an .mp4 file and a .wav (or a similar audio format) in my app's Documents folder. This is what I have so far, but it never even calls - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection.
Any idea what is going wrong here?
#import "VideoCaptureViewController.h"
#import <AssetsLibrary/AssetsLibrary.h>
#import "SMFileManager.h"
@interface VideoCaptureViewController () {
AVCaptureSession *session_;
AVCaptureDevice *captureDevice_;
AVCaptureDeviceInput *deviceInput_;
AVCaptureMovieFileOutput *captureMovieFileOutput_;
BOOL recording_;
AVCaptureAudioDataOutput *audioDataOutput_;
NSString *filePathCapturedVideo_;
}
- (void) toggleRecording;
- (void) endRecording;
@end
@implementation VideoCaptureViewController
@synthesize delegate;
@synthesize previewLayer;
- (void)viewDidLoad
{
[super viewDidLoad];
session_ = [[AVCaptureSession alloc] init];
// Add video input.
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
// AVCaptureDevice *captureDevice = nil;
captureDevice_ = nil;
for (AVCaptureDevice *device in videoDevices) {
if (device.position == AVCaptureDevicePositionFront) {
captureDevice_ = device;
break;
}
}
// AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (captureDevice_) {
NSError *error;
deviceInput_ = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice_ error:&error];
if (!error){
if ([session_ canAddInput:deviceInput_])
[session_ addInput:deviceInput_];
else
NSLog(@"Couldn't add video input");
} else {
NSLog(@"Couldn't create video input");
}
} else {
NSLog(@"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(@"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput) {
[session_ addInput:audioInput];
}
//----- ADD OUTPUTS -----
// audio
audioDataOutput_= [[AVCaptureAudioDataOutput alloc] init];
[session_ addOutput:audioDataOutput_];
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[audioDataOutput_ setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
//ADD VIDEO PREVIEW LAYER
NSLog(@"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:session_] autorelease]];
previewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberately set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(@"Adding movie file output");
captureMovieFileOutput_ = [[AVCaptureMovieFileOutput alloc] init];
// Float64 TotalSeconds = 60; //Total seconds
// int32_t preferredTimeScale = CAPTURE_FRAMES_PER_SECOND; //Frames per second
// CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
// captureMovieFileOutput_.maxRecordedDuration = maxDuration;
captureMovieFileOutput_.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([session_ canAddOutput:captureMovieFileOutput_])
[session_ addOutput:captureMovieFileOutput_];
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check it's supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(@"Setting image quality");
[session_ setSessionPreset:AVCaptureSessionPresetHigh];
if ([session_ canSetSessionPreset:AVCaptureSessionPreset1280x720]) { //Check size based configs are supported before setting them
NSLog(@"1280x720 confirmed!");
[session_ setSessionPreset:AVCaptureSessionPreset1280x720];
}
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under our view controller's existing controls
NSLog(@"Display the preview layer");
CGRect layerRect = [[[self view] layer] bounds];
[previewLayer setBounds:layerRect];
[previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:previewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[session_ startRunning];
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//********** VIEW WILL APPEAR **********
//View about to be added to the window (called each time it appears)
//Occurs after other view's viewWillDisappear
- (void)viewWillAppear:(BOOL)animated
{
NSLog(@"%s",__func__);
[super viewWillAppear:animated];
recording_ = NO;
}
- (void) viewDidAppear:(BOOL)animated
{
NSLog(@"%s",__func__);
UIButton *startStopButton = [UIButton buttonWithType:UIButtonTypeCustom];
startStopButton.frame = CGRectMake(self.view.center.x, self.view.frame.size.height - 80, 100, 50);
[startStopButton setTitle:@"record" forState:UIControlStateNormal];
[startStopButton addTarget:self action:@selector(toggleRecording) forControlEvents:UIControlEventTouchUpInside];
startStopButton.enabled = YES;
startStopButton.backgroundColor = [UIColor grayColor];
[self.view addSubview:startStopButton];
// [NSTimer timerWithTimeInterval:5 target:self selector:@selector(toggleRecording) userInfo:nil repeats:NO];
// [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(endRecording) userInfo:nil repeats:NO];
}
- (void) endRecording
{
[self dismissModalViewControllerAnimated:YES];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
AVCaptureConnection *CaptureConnection = [captureMovieFileOutput_ connectionWithMediaType:AVMediaTypeVideo];
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported]){
// [CaptureConnection setVideoOrientation:[UIDevice currentDevice].orientation];
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if required)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
//********** GET CAMERA IN SPECIFIED POSITION IF IT EXISTS **********
- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position
{
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *Device in Devices)
{
if ([Device position] == Position)
{
return Device;
}
}
return nil;
}
//********** CAMERA TOGGLE **********
- (IBAction)CameraToggleButtonPressed:(id)sender
{
if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1) //Only do if device has multiple cameras
{
NSLog(@"Toggle camera");
NSError *error;
//AVCaptureDeviceInput *videoInput = [self videoInput];
AVCaptureDeviceInput *NewVideoInput;
AVCaptureDevicePosition position = [[deviceInput_ device] position];
if (position == AVCaptureDevicePositionBack)
{
NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionFront] error:&error];
}
else if (position == AVCaptureDevicePositionFront){
NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionBack] error:&error];
}
if (NewVideoInput != nil)
{
[session_ beginConfiguration]; //We can now change the inputs and output configuration. Use commitConfiguration to end
[session_ removeInput:deviceInput_];
if ([session_ canAddInput:NewVideoInput])
{
[session_ addInput:NewVideoInput];
deviceInput_ = NewVideoInput;
}
else
{
[session_ addInput:deviceInput_];
}
//Set the connection properties again
[self CameraSetOutputProperties];
[session_ commitConfiguration];
[NewVideoInput release];
}
}
}
- (void)toggleRecording
{
NSLog(@"%s",__func__);
if (!recording_){
NSLog(@"START RECORDING");
recording_ = YES;
NSString* documentsDirectory= [SMFileManager applicationDocumentsDirectory];
filePathCapturedVideo_ = [documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"];
NSLog(@"storing file at %@", filePathCapturedVideo_);
NSURL *url = [[NSURL alloc] initFileURLWithPath: filePathCapturedVideo_];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:filePathCapturedVideo_]) {
NSError *error;
if ([fileManager removeItemAtPath:filePathCapturedVideo_ error:&error] == NO) {
//Error - handle if required
}
}
//Start recording
[captureMovieFileOutput_ startRecordingToOutputFileURL:url recordingDelegate:self];
} else {
NSLog(@"STOP RECORDING");
recording_ = NO;
[captureMovieFileOutput_ stopRecording];
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
NSLog(@"%s",__func__);
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"%@", outputFileURL);
NSLog(@"%@", captureOutput);
NSString* documentsDirectory= [SMFileManager applicationDocumentsDirectory];
filePathCapturedVideo_ = [documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"];
[self.delegate videoCaptured:filePathCapturedVideo_];
}
@end
Best answer
I ran into a similar problem and concluded that it is not possible to use both delegates (the movie file output's recording delegate and the data output's sample buffer delegate) at the same time. The best solution is to use only didOutputSampleBuffer together with an AVAssetWriter and save the files separately yourself.
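To illustrate that approach, here is a minimal sketch of the AVAssetWriter route. It assumes the AVCaptureMovieFileOutput is replaced by an AVCaptureVideoDataOutput configured like the existing audioDataOutput_ (added to the session, with self as its sample buffer delegate), and it introduces hypothetical ivars (videoDataOutput_, videoWriter_, videoWriterInput_, audioWriter_, audioWriterInput_, documentsDirectory_) that are not part of the question's code:

// Sketch only: one AVAssetWriter per file, both fed from the same capture session.
// All writer/input ivars and documentsDirectory_ are assumptions, not from the question.
- (void)setUpWriters
{
    NSError *error = nil;
    // Video writer -> captured-video.mp4
    NSURL *videoURL = [NSURL fileURLWithPath:[documentsDirectory_ stringByAppendingPathComponent:@"captured-video.mp4"]];
    videoWriter_ = [[AVAssetWriter alloc] initWithURL:videoURL fileType:AVFileTypeMPEG4 error:&error];
    NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                     AVVideoWidthKey  : @1280,
                                     AVVideoHeightKey : @720 };
    videoWriterInput_ = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    videoWriterInput_.expectsMediaDataInRealTime = YES;
    [videoWriter_ addInput:videoWriterInput_];
    // Audio writer -> captured-audio.wav (linear PCM so a WAVE container is valid)
    NSURL *audioURL = [NSURL fileURLWithPath:[documentsDirectory_ stringByAppendingPathComponent:@"captured-audio.wav"]];
    audioWriter_ = [[AVAssetWriter alloc] initWithURL:audioURL fileType:AVFileTypeWAVE error:&error];
    NSDictionary *audioSettings = @{ AVFormatIDKey                  : @(kAudioFormatLinearPCM),
                                     AVSampleRateKey                : @44100.0,
                                     AVNumberOfChannelsKey          : @1,
                                     AVLinearPCMBitDepthKey         : @16,
                                     AVLinearPCMIsFloatKey          : @NO,
                                     AVLinearPCMIsBigEndianKey      : @NO,
                                     AVLinearPCMIsNonInterleavedKey : @NO };
    audioWriterInput_ = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput_.expectsMediaDataInRealTime = YES;
    [audioWriter_ addInput:audioWriterInput_];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    // Route each buffer to the writer that belongs to the output it came from.
    if (captureOutput == videoDataOutput_) {
        if (videoWriter_.status == AVAssetWriterStatusUnknown) {
            [videoWriter_ startWriting];
            [videoWriter_ startSessionAtSourceTime:timestamp];
        }
        if (videoWriterInput_.readyForMoreMediaData)
            [videoWriterInput_ appendSampleBuffer:sampleBuffer];
    } else if (captureOutput == audioDataOutput_) {
        if (audioWriter_.status == AVAssetWriterStatusUnknown) {
            [audioWriter_ startWriting];
            [audioWriter_ startSessionAtSourceTime:timestamp];
        }
        if (audioWriterInput_.readyForMoreMediaData)
            [audioWriterInput_ appendSampleBuffer:sampleBuffer];
    }
}

When recording stops, mark both writer inputs as finished and call finishWriting (or finishWritingWithCompletionHandler: on iOS 6 and later) on each writer; the video and audio files then end up in the Documents directory independently of each other.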
Regarding iphone - writing video and audio separately but simultaneously with AVCapture, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/12875763/