How can I record on iOS 8, or even iOS 7 if possible, and switch between cameras while recording? I've seen apps that start recording from the front camera, then switch to the back camera, and when the clip is finished you can see that both cameras captured footage in the same recording. I think the Glide app has this, and maybe Snapchat too (not sure about the last one).
Best answer
After a long search and a lot of effort, here is the code... Thanks for your suggestion, Matic, for pointing me down the right path; I made some improvements, without merging the files at the end.
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>
@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
IBOutlet UIView *viewCanvasRecording;
IBOutlet UIButton *btnRecord;
AVCaptureDevice *frontCamera; // A pointer to the front camera
AVCaptureDevice *backCamera; // A pointer to the back camera
AVCaptureDeviceInput *captureFrontInput;
AVCaptureDeviceInput *captureBackInput;
__block AVAssetWriter *_assetWriter;
__block AVAssetWriterInput *_videoWriterInput;
__block AVAssetWriterInput *_audioWriterInput;
__block AVCaptureVideoDataOutput *videoOutput;
dispatch_queue_t _captureQueue;
BOOL currentFrontCamera;
BOOL isCapturingInput;
NSURL *recordingFile;
}
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@end
// ===== now the .m file
//
// ViewController.m
// record
//
// Created by Catalin on 31/05/15.
// Copyright (c) 2015 lamobratory.com. All rights reserved.
//
#import "ViewController.h"
#import <AssetsLibrary/AssetsLibrary.h>
#import <MediaPlayer/MediaPlayer.h>
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
currentFrontCamera=NO;
isCapturingInput=NO;
_assetWriter=nil;
[self findCamera:YES]; // init the front camera
[self findCamera:NO]; // init the back camera
[self performSelector:@selector(initCaptureWithCamera) withObject:nil afterDelay:1];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}
-(IBAction)tapStartRecord:(id)sender
{
if([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"START"])
{
[btnRecord setTitle:@"STOP" forState:UIControlStateNormal];
isCapturingInput=YES;
}
else if([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"STOP"])
{
isCapturingInput=NO;
dispatch_async(_captureQueue, ^{
[_assetWriter finishWritingWithCompletionHandler:^{
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:recordingFile completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(@"assets library failed (%@)", error);
}
else
{
NSLog(@"file saved to library");
}
[self.captureSession stopRunning];
_assetWriter=nil;
recordingFile = nil;
}];
}];
});
[btnRecord setTitle:@"START" forState:UIControlStateNormal];
}
}
-(IBAction)tapSwitchCamera:(id)sender
{
// switch outputs
[self swipeCamera];
}
-(void)swipeCamera
{
currentFrontCamera=!currentFrontCamera; // swipe camera
[self.captureSession beginConfiguration];
[self.captureSession removeInput:captureBackInput];
[self.captureSession removeInput:captureFrontInput];
if(!currentFrontCamera)
[self.captureSession addInput:captureBackInput];
else
[self.captureSession addInput:captureFrontInput];
[self.captureSession commitConfiguration];
}
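// Note: because the input swap in swipeCamera happens inside a
// begin/commitConfiguration transaction, the session keeps running the whole
// time: the sample buffer delegate keeps receiving frames, and the asset
// writer simply continues the same file with footage from the new camera.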
#pragma mark - Camera methods
-(BOOL)findCamera:(BOOL)useFrontCamera
{
// 0. Make sure we initialize our camera pointer:
AVCaptureDevice *m_camera = nil;
// 1. Get a list of available devices:
// specifying AVMediaTypeVideo will ensure we only get a list of cameras, no microphones
NSArray * devices = [ AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo ];
// 2. Iterate through the device array and if a device is a camera, check if it's the one we want:
for ( AVCaptureDevice * device in devices )
{
if ( useFrontCamera && AVCaptureDevicePositionFront == [ device position ] )
{
// We asked for the front camera and got the front camera, now keep a pointer to it:
m_camera = device;
}
else if ( !useFrontCamera && AVCaptureDevicePositionBack == [ device position ] )
{
// We asked for the back camera and here it is:
m_camera = device;
}
}
// 3. Set a frame rate for the camera:
if ( nil != m_camera )
{
// We first need to lock the camera, so no one else can mess with its configuration:
if ( [ m_camera lockForConfiguration: NULL ] )
{
// Allow between 10 and 30 frames per second: the shortest allowed frame
// duration (1/30 s) sets the 30 fps ceiling...
[ m_camera setActiveVideoMinFrameDuration: CMTimeMake( 1, 30 ) ];
// ...and the longest allowed frame duration (1/10 s) sets the 10 fps floor
[ m_camera setActiveVideoMaxFrameDuration: CMTimeMake( 1, 10 ) ];
[ m_camera unlockForConfiguration ];
}
}
if(!useFrontCamera)
backCamera=m_camera;
else
frontCamera=m_camera;
// 4. If we've found the camera we want, return true
return ( nil != m_camera );
}
-(void) setupWriter
{
NSError *error = nil;
_assetWriter = [[AVAssetWriter alloc] initWithURL:recordingFile fileType:AVFileTypeQuickTimeMovie error:&error];
NSDictionary* actual = videoOutput.videoSettings;
int _cy = [[actual objectForKey:@"Height"] intValue];
int _cx = [[actual objectForKey:@"Width"] intValue];
NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt: _cx], AVVideoWidthKey,
[NSNumber numberWithInt: _cy], AVVideoHeightKey,
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
_videoWriterInput.expectsMediaDataInRealTime = YES;
_videoWriterInput.transform=CGAffineTransformMakeRotation(M_PI/2); // otherwise the video comes out in landscape even when the phone is held in portrait
/*settings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
[ NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 16000], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
nil];*/
_audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: nil];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
if([_assetWriter canAddInput:_videoWriterInput])
{
NSLog(@"added output to video");
[_assetWriter addInput:_videoWriterInput];
}
if([_assetWriter canAddInput:_audioWriterInput])
{
NSLog(@"added output to audio");
[_assetWriter addInput:_audioWriterInput];
}
}
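// Note: setupWriter is only called lazily from the capture callback below,
// so by the time it runs the session is already live and
// videoOutput.videoSettings contains the actual Width/Height of the frames
// being delivered.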
- (void)initCaptureWithCamera
{
if(self.captureVideoPreviewLayer!=nil) // refresh the views
[self.captureVideoPreviewLayer removeFromSuperlayer];
// remove old file at path if exists
recordingFile = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/tmp_vid.mov", NSTemporaryDirectory()]];
[[NSFileManager defaultManager] removeItemAtURL:recordingFile error:nil];
// ========================= configure session
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
// ========================= input devices from camera and mic
NSError * error = nil;
captureFrontInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error: &error];
captureBackInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error: &error];
if ( nil != error )
return;
if ([self.captureSession canAddInput:captureBackInput])
{
NSLog(@"added input from camera");
[self.captureSession addInput:captureBackInput];
}
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
if ([self.captureSession canAddInput:micinput])
{
NSLog(@"added input from mic");
[self.captureSession addInput:micinput];
}
// ========================= now the outputs: video and audio samples feeding the asset writer
_captureQueue = dispatch_queue_create("com.myapp.capture", DISPATCH_QUEUE_SERIAL);
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[videoOutput setSampleBufferDelegate:self queue:_captureQueue];
[audioOutput setSampleBufferDelegate:self queue:_captureQueue];
// NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
// NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
// NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
//
// [videoOutput setVideoSettings:videoSettings];
if ([self.captureSession canAddOutput:videoOutput]) {
[self.captureSession addOutput:videoOutput];
}
if ([self.captureSession canAddOutput:audioOutput]) {
[self.captureSession addOutput:audioOutput];
}
// ======================================================================
// add the preview layer to see what we film
if (!self.captureVideoPreviewLayer)
self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
// if you want to adjust the previewlayer frame, here!
self.captureVideoPreviewLayer.frame = viewCanvasRecording.bounds;
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[viewCanvasRecording.layer addSublayer: self.captureVideoPreviewLayer];
[self.captureSession startRunning];
}
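// From here on, both outputs deliver their sample buffers, one at a time, to
// the shared delegate method below on the serial _captureQueue.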
-(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
BOOL frameFromVideoCaptured = NO;
@synchronized(self)
{
if (!isCapturingInput) // we haven't started filming yet, so just ignore the frames
return;
if(!_assetWriter)
{
[self setupWriter];
}
frameFromVideoCaptured=(captureOutput==videoOutput);
}
// pass the frame to the asset writer
[self writeFrame:sampleBuffer isVideo:frameFromVideoCaptured];
}
-(BOOL)writeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo
{
if (CMSampleBufferDataIsReady(sampleBuffer))
{
if (_assetWriter.status == AVAssetWriterStatusUnknown)
{
CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[_assetWriter startWriting];
[_assetWriter startSessionAtSourceTime:startTime];
}
if (_assetWriter.status == AVAssetWriterStatusFailed)
{
NSLog(@"writer error %@", _assetWriter.error.localizedDescription);
return NO;
}
if (isVideo)
{
if (_videoWriterInput.readyForMoreMediaData == YES)
{
[_videoWriterInput appendSampleBuffer:sampleBuffer];
return YES;
}
}
else
{
if (_audioWriterInput.readyForMoreMediaData)
{
[_audioWriterInput appendSampleBuffer:sampleBuffer];
return YES;
}
}
}
return NO;
}
@end
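One thing the listing assumes is that the app already has camera and microphone access; the first capture call on iOS 8 triggers the system prompt on its own, but it can be cleaner to ask up front, before initCaptureWithCamera runs. A minimal sketch using AVCaptureDevice's requestAccessForMediaType: API (the checkPermissionsThen: wrapper name is my own, not part of the answer above):
// A minimal sketch (assumption, not part of the original answer): request
// camera and microphone access explicitly before starting the session.
// The checkPermissionsThen: method name is hypothetical.
- (void)checkPermissionsThen:(void (^)(void))startBlock
{
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL videoGranted) {
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL audioGranted) {
if (videoGranted && audioGranted)
dispatch_async(dispatch_get_main_queue(), startBlock); // the completion handlers can arrive on an arbitrary queue
}];
}];
}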
Regarding "ios - iOS 8 film from both front and back cameras", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/30499781/