- ubuntu12.04环境下使用kvm ioctl接口实现最简单的虚拟机
- Ubuntu 通过无线网络安装Ubuntu Server启动系统后连接无线网络的方法
- 在Ubuntu上搭建网桥的方法
- ubuntu 虚拟机上网方式及相关配置详解
CFSDN坚持开源创造价值,我们致力于搭建一个资源共享平台,让每一个IT人在这里找到属于你的精彩世界.
这篇CFSDN的博客文章iOS AVCaptureSession实现视频录制功能由作者收集整理,如果你对这篇文章有兴趣,记得点赞哟.
本文实例为大家分享了AVCaptureSession实现视频录制功能的具体代码,供大家参考,具体内容如下 。
（代码清单如下）
#import "RecordingVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

@interface RecordingVideoViewController () <AVCaptureFileOutputRecordingDelegate>
// Capture session: coordinates data flow between the input and output devices.
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Device inputs: provide the data obtained from an AVCaptureDevice.
@property (strong, nonatomic) AVCaptureDeviceInput *videoCaptureDeviceInput;
@property (strong, nonatomic) AVCaptureDeviceInput *audioCaptureDeviceInput;
// Movie file output.
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
// Live camera preview layer.
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
// Container for the custom UI controls (drawn above the preview layer).
@property (strong, nonatomic) UIView *viewContainer;
// Tap-to-focus indicator.
@property (strong, nonatomic) UIImageView *focusCursor;
// Recording-duration label ("mm:ss").
@property (strong, nonatomic) UILabel *timeLabel;
// Front/back camera toggle button.
@property (strong, nonatomic) UIButton *switchCameraBtn;
// Zoom button.
@property (strong, nonatomic) UIButton *scaleBtn;
// One-second recording timer (retains self until invalidated).
@property (strong, nonatomic) NSTimer *timer;
@end

@implementation RecordingVideoViewController {
@private
    NSInteger _num;         // elapsed recording time, in seconds
    CGFloat _kCameraScale;  // current zoom factor, cycles 1.0 -> 1.5 -> ... -> 3.0
}

// Lazily builds the overlay view holding the record / switch / zoom controls.
// NOTE(review): layout uses Masonry (mas_makeConstraints) and a project macro
// `kBorder`, both declared elsewhere in the project.
- (UIView *)viewContainer {
    if (!_viewContainer) {
        _viewContainer = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds];

        UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom];
        takeButton.backgroundColor = [UIColor redColor];
        [takeButton setTitle:@"start" forState:UIControlStateNormal];
        [takeButton addTarget:self action:@selector(takeButtonClick:) forControlEvents:UIControlEventTouchUpInside];

        _timeLabel = [[UILabel alloc] init];
        _timeLabel.textColor = [UIColor redColor];
        _timeLabel.textAlignment = NSTextAlignmentCenter;
        _timeLabel.font = [UIFont boldSystemFontOfSize:20];
        _timeLabel.text = @"00:00";

        _switchCameraBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        [_switchCameraBtn setTitle:@"switch" forState:UIControlStateNormal];
        _switchCameraBtn.backgroundColor = [UIColor redColor];
        [_switchCameraBtn addTarget:self action:@selector(switchCameraBtnClick) forControlEvents:UIControlEventTouchUpInside];

        _scaleBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        [_scaleBtn setTitle:@"1X" forState:UIControlStateNormal];
        _scaleBtn.backgroundColor = [UIColor redColor];
        [_scaleBtn addTarget:self action:@selector(scaleBtnClick:) forControlEvents:UIControlEventTouchUpInside];

        [_viewContainer addSubview:takeButton];
        [_viewContainer addSubview:_timeLabel];
        [_viewContainer addSubview:_scaleBtn];
        [_viewContainer addSubview:_switchCameraBtn];

        [takeButton mas_makeConstraints:^(MASConstraintMaker *make) {
            make.size.mas_equalTo(CGSizeMake(60, 40));
            make.centerX.mas_equalTo(_viewContainer);
            make.bottom.mas_equalTo(_viewContainer).offset(-64);
        }];
        [_timeLabel mas_makeConstraints:^(MASConstraintMaker *make) {
            make.centerX.mas_equalTo(_viewContainer);
            make.height.mas_equalTo(@30);
            make.top.mas_equalTo(_viewContainer);
        }];
        [_scaleBtn mas_makeConstraints:^(MASConstraintMaker *make) {
            make.size.mas_equalTo(CGSizeMake(60, 40));
            make.left.mas_equalTo(_viewContainer).offset(10);
            make.top.mas_equalTo(_viewContainer);
        }];
        [_switchCameraBtn mas_makeConstraints:^(MASConstraintMaker *make) {
            make.size.mas_equalTo(CGSizeMake(60, 40));
            make.top.mas_equalTo(_viewContainer);
            make.right.mas_equalTo(_viewContainer).offset(-10);
        }];

        _focusCursor = [[UIImageView alloc] init];
        kBorder(_focusCursor, 1, [UIColor yellowColor]);
        _focusCursor.alpha = 0;
        [_viewContainer addSubview:self.focusCursor];
        [_focusCursor mas_makeConstraints:^(MASConstraintMaker *make) {
            make.size.mas_equalTo(CGSizeMake(40, 40));
            make.center.mas_equalTo(_viewContainer);
        }];
    }
    return _viewContainer;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    self.title = @"视频录制";
    _kCameraScale = 1.0f;

    // Create the capture session; request 720p when the hardware supports it.
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    }

    NSError *error = nil;

    // Video input: back camera.
    AVCaptureDevice *videoCaptureDevice = [self cameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!videoCaptureDevice) {
        NSLog(@"获取后置摄像头失败!");
        return;
    }
    _videoCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCaptureDevice error:&error];
    if (error) {
        NSLog(@"取得视频设备输入对象时出错");
        return;
    }

    // Audio input: first available microphone.
    AVCaptureDevice *audioCatureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    _audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCatureDevice error:&error];
    if (error) {
        NSLog(@"取得音频设备输入对象时出错");
        return;
    }

    // Movie file output.
    _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    // Attach the inputs to the session.
    if ([_captureSession canAddInput:_videoCaptureDeviceInput]) {
        [_captureSession addInput:_videoCaptureDeviceInput];
        // FIX(review): guard the audio input too instead of adding it blindly.
        if ([_captureSession canAddInput:_audioCaptureDeviceInput]) {
            [_captureSession addInput:_audioCaptureDeviceInput];
        }
    }

    // Attach the output to the session.
    if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
        [_captureSession addOutput:_captureMovieFileOutput];
    }

    // Video stabilization.
    // FIX(review): the original asked the *audio* connection for video
    // stabilization, and did so before the output was attached to the session
    // (connections only exist after addOutput:), so stabilization was never
    // enabled. Configure the video connection after the output is attached.
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }

    // Preview layer showing the live camera feed.
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    self.viewContainer.layer.masksToBounds = YES;
    _captureVideoPreviewLayer.frame = self.viewContainer.bounds;
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:_captureVideoPreviewLayer];

    // Custom controls on top of the preview.
    [self.view addSubview:self.viewContainer];

    // Tap-to-focus gesture.
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
    [self.viewContainer addGestureRecognizer:tapGesture];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    [self.captureSession startRunning];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.captureSession stopRunning];
    // The scheduled NSTimer retains its target (self); invalidate it here so
    // the controller can deallocate.
    [self.timer invalidate];
    self.timer = nil;
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    // Reset any zoom transform applied to the preview layer.
    [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(1, 1)];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

// Start / stop recording.
- (void)takeButtonClick:(UIButton *)sender {
    if ([self.captureMovieFileOutput isRecording]) {
        [self.captureMovieFileOutput stopRecording];
        [self.navigationController popViewControllerAnimated:YES];
    } else {
        // Keep the recorded video's orientation in sync with the preview.
        AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation;
        NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Movie.mov"];
        NSLog(@"%@", filePath);
        [self.captureMovieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:filePath] recordingDelegate:self];
        self.switchCameraBtn.hidden = YES;
        sender.backgroundColor = [UIColor greenColor];
        [sender setTitle:@"stop" forState:UIControlStateNormal];
        self.timer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(timeAction) userInfo:nil repeats:YES];
        [self.timer setFireDate:[NSDate distantPast]];
    }
}

// Toggle between the front and back camera.
- (void)switchCameraBtnClick {
    AVCaptureDevicePosition currentPosition = self.videoCaptureDeviceInput.device.position;
    AVCaptureDevicePosition toPosition;
    if (currentPosition == AVCaptureDevicePositionUnspecified ||
        currentPosition == AVCaptureDevicePositionFront) {
        toPosition = AVCaptureDevicePositionBack;
    } else {
        toPosition = AVCaptureDevicePositionFront;
    }
    AVCaptureDevice *toCapturDevice = [self cameraDeviceWithPosition:toPosition];
    if (!toCapturDevice) {
        NSLog(@"获取要切换的设备失败");
        return;
    }
    NSError *error = nil;
    AVCaptureDeviceInput *toVideoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toCapturDevice error:&error];
    if (error) {
        NSLog(@"获取要切换的设备输入失败");
        return;
    }
    // Swap the inputs inside a begin/commitConfiguration pair so the session
    // applies the change atomically.
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.videoCaptureDeviceInput];
    if ([self.captureSession canAddInput:toVideoDeviceInput]) {
        [self.captureSession addInput:toVideoDeviceInput];
        self.videoCaptureDeviceInput = toVideoDeviceInput;
    }
    [self.captureSession commitConfiguration];
}

// Tap-to-focus gesture handler.
- (void)tapScreen:(UITapGestureRecognizer *)tap {
    CGPoint point = [tap locationInView:self.viewContainer];
    // Convert the UI point into camera (device) coordinates.
    CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
    // Focus-cursor animation: pop to 1.5x, shrink back, then fade out.
    self.focusCursor.center = point;
    self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
    self.focusCursor.alpha = 1.0f;
    [UIView animateWithDuration:1 animations:^{
        self.focusCursor.transform = CGAffineTransformIdentity;
    } completion:^(BOOL finished) {
        self.focusCursor.alpha = 0.0f;
    }];
    // Apply the focus point to the capture device.
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}

/** Set the focus mode and focus point of interest on the current video device. */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    AVCaptureDevice *captureDevice = [self.videoCaptureDeviceInput device];
    NSError *error = nil;
    // Device properties may only be changed between lockForConfiguration:
    // and unlockForConfiguration.
    if ([captureDevice lockForConfiguration:&error]) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
        if ([captureDevice isFocusPointOfInterestSupported]) {
            [captureDevice setFocusPointOfInterest:point];
        }
        // // Exposure
        // if ([captureDevice isExposureModeSupported:exposureMode]) {
        //     [captureDevice setExposureMode:exposureMode];
        // }
        // if ([captureDevice isExposurePointOfInterestSupported]) {
        //     [captureDevice setExposurePointOfInterest:point];
        // }
        // // Flash mode
        // if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
        //     [captureDevice setFlashMode:AVCaptureFlashModeAuto];
        // }
        // Release the configuration lock. (The original comment here said
        // "lock" — this call is the unlock.)
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"设置设备属性过程发生错误,错误信息:%@", error.localizedDescription);
    }
}

// Cycle the zoom factor: 1.0 -> 1.5 -> 2.0 -> 2.5 -> 3.0 -> 1.0.
- (void)scaleBtnClick:(UIButton *)sender {
    _kCameraScale += 0.5;
    if (_kCameraScale > 3.0) {
        _kCameraScale = 1.0;
    }
    AVCaptureDevice *videoDevice = self.videoCaptureDeviceInput.device;
    NSError *error = nil;
    if ([videoDevice lockForConfiguration:&error]) {
        [videoDevice setVideoZoomFactor:_kCameraScale];
        [videoDevice unlockForConfiguration];
        [sender setTitle:[NSString stringWithFormat:@"%lgX", _kCameraScale] forState:UIControlStateNormal];
        // Animate the preview layer so it matches the new zoom.
        [CATransaction begin];
        [CATransaction setAnimationDuration:0.25];
        [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(_kCameraScale, _kCameraScale)];
        [CATransaction commit];
    } else {
        // FIX(review): the original statement was missing its semicolon,
        // which did not compile.
        NSLog(@"修改设备属性失败!");
    }
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    NSLog(@"开始录制");
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    NSLog(@"录制结束");
    // Save the finished movie into the photo album.
    ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
    [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@", error.localizedDescription);
        }
    }];
}

// Timer tick: render the elapsed time as "mm:ss".
- (void)timeAction {
    // FIX(review): cast NSInteger to long so %ld is correct on both 32- and
    // 64-bit builds.
    self.timeLabel.text = [NSString stringWithFormat:@"%.2ld:%.2ld", (long)(_num / 60), (long)(_num % 60)];
    _num++;
}

/** Return the camera device at the given position, or nil if none exists. */
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

@end
|
参考代码:
（代码清单如下）
#import "VideoTestViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

// Block applied to a device while its configuration lock is held.
typedef void (^PropertyChangeBlock)(AVCaptureDevice *captureDevice);

@interface VideoTestViewController () <AVCaptureFileOutputRecordingDelegate>
// Capture session: coordinates data flow between the input and output devices.
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Device input: provides the data obtained from an AVCaptureDevice.
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;
// Movie file output.
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
// Live camera preview layer.
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
// Whether rotation is allowed (rotation is disabled while recording).
@property (assign, nonatomic) BOOL enableRotation;
// Bounds before the last rotation.
// FIX(review): was declared `CGRect *` (a pointer that was never allocated);
// a plain CGRect value is what an assign property should hold.
@property (assign, nonatomic) CGRect lastBounds;
// Background task identifier used to finish saving after backgrounding.
@property (assign, nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
// Container for the custom UI controls.
@property (strong, nonatomic) UIView *viewContainer;
// Record button.
@property (strong, nonatomic) UIButton *takeButton;
// Tap-to-focus indicator.
@property (strong, nonatomic) UIImageView *focusCursor;
@end

@implementation VideoTestViewController

#pragma mark - View lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Create the capture session; request 720p when the hardware supports it.
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    }

    // Video input: back camera.
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!captureDevice) {
        NSLog(@"取得后置摄像头时出现问题.");
        return;
    }

    // Audio input: first available microphone.
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];

    NSError *error = nil;
    // Wrap the devices in device-input objects.
    _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        NSLog(@"取得设备输入对象时出错,错误原因:%@", error.localizedDescription);
        return;
    }
    AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (error) {
        NSLog(@"取得设备输入对象时出错,错误原因:%@", error.localizedDescription);
        return;
    }

    // Movie file output.
    _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

    // Attach the inputs to the session.
    if ([_captureSession canAddInput:_captureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
        [_captureSession addInput:audioCaptureDeviceInput];
    }

    // Attach the output to the session.
    if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
        [_captureSession addOutput:_captureMovieFileOutput];
    }

    // Video stabilization.
    // FIX(review): the original queried the connection *before* the output
    // was attached to the session; connections only exist after addOutput:,
    // so stabilization was never enabled.
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }

    // Preview layer showing the live camera feed.
    _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    CALayer *layer = self.viewContainer.layer;
    layer.masksToBounds = YES;
    _captureVideoPreviewLayer.frame = layer.bounds;
    _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;  // fill mode
    // Insert the preview below the focus cursor so the cursor stays visible.
    [layer insertSublayer:_captureVideoPreviewLayer below:self.focusCursor.layer];

    _enableRotation = YES;
    [self addNotificationToCaptureDevice:captureDevice];
    [self addGenstureRecognizer];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    [self.captureSession startRunning];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.captureSession stopRunning];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

- (BOOL)shouldAutorotate {
    return self.enableRotation;
}

// Keep the preview layer's orientation in sync when the screen rotates.
// NOTE(review): willRotateToInterfaceOrientation/didRotateFromInterfaceOrientation
// are deprecated since iOS 8; kept for compatibility with the original code.
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration {
    AVCaptureConnection *captureConnection = [self.captureVideoPreviewLayer connection];
    captureConnection.videoOrientation = (AVCaptureVideoOrientation)toInterfaceOrientation;
}

// Resize the preview layer after rotation completes.
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation {
    _captureVideoPreviewLayer.frame = self.viewContainer.bounds;
}

- (void)dealloc {
    [self removeNotification];
}

#pragma mark - UI actions

// Start or stop recording.
- (void)takeButtonClick:(UIButton *)sender {
    AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if (![self.captureMovieFileOutput isRecording]) {
        // Lock rotation while recording.
        self.enableRotation = NO;
        // Begin a background task so the save can finish if the app is
        // backgrounded mid-recording.
        if ([[UIDevice currentDevice] isMultitaskingSupported]) {
            self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
        }
        // Keep the recorded video's orientation in sync with the preview.
        captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation;
        // FIX(review): use stringByAppendingPathComponent: for path joining
        // instead of plain string concatenation.
        NSString *outputFielPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
        NSLog(@"save path is :%@", outputFielPath);
        NSURL *fileUrl = [NSURL fileURLWithPath:outputFielPath];
        [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
    } else {
        [self.captureMovieFileOutput stopRecording];
    }
}

// Toggle between the front and back camera.
- (void)toggleButtonClick:(UIButton *)sender {
    AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
    AVCaptureDevicePosition currentPosition = [currentDevice position];
    [self removeNotificationFromCaptureDevice:currentDevice];

    AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;
    if (currentPosition == AVCaptureDevicePositionUnspecified ||
        currentPosition == AVCaptureDevicePositionFront) {
        toChangePosition = AVCaptureDevicePositionBack;
    }
    AVCaptureDevice *toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
    [self addNotificationToCaptureDevice:toChangeDevice];

    AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:nil];
    // Session configuration changes must sit between beginConfiguration and
    // commitConfiguration so they are applied atomically.
    [self.captureSession beginConfiguration];
    [self.captureSession removeInput:self.captureDeviceInput];
    if ([self.captureSession canAddInput:toChangeDeviceInput]) {
        [self.captureSession addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    }
    [self.captureSession commitConfiguration];
}

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    NSLog(@"开始录制...");
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    NSLog(@"视频录制完成.");
    // Save the movie to the photo album in the background, then end the
    // background task taken out when recording started.
    self.enableRotation = YES;
    UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
    self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
    ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
    [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"保存视频到相簿过程中发生错误,错误信息:%@", error.localizedDescription);
        }
        if (lastBackgroundTaskIdentifier != UIBackgroundTaskInvalid) {
            [[UIApplication sharedApplication] endBackgroundTask:lastBackgroundTaskIdentifier];
        }
        NSLog(@"成功保存视频到相簿.");
    }];
}

#pragma mark - Notifications

/**
 * Observe subject-area changes on the given input device.
 */
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice {
    // Subject-area-change monitoring must be enabled on the device before
    // the notification can fire.
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        captureDevice.subjectAreaChangeMonitoringEnabled = YES;
    }];
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

/**
 * Remove every notification observation held by this controller.
 */
- (void)removeNotification {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self];
}

- (void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    // Session runtime errors.
    [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}

/**
 * Device connected.
 */
- (void)deviceConnected:(NSNotification *)notification {
    NSLog(@"设备已连接...");
}

/**
 * Device disconnected.
 */
- (void)deviceDisconnected:(NSNotification *)notification {
    NSLog(@"设备已断开.");
}

/**
 * Capture subject area changed.
 */
- (void)areaChange:(NSNotification *)notification {
    NSLog(@"捕获区域改变...");
}

/**
 * Session runtime error.
 */
- (void)sessionRuntimeError:(NSNotification *)notification {
    NSLog(@"会话发生错误.");
}

#pragma mark - Private helpers

/**
 * Return the camera device at the given position, or nil if none exists.
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

/**
 * Apply a property change to the current capture device, taking and
 * releasing the required configuration lock around it.
 */
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    // Device properties may only be changed between lockForConfiguration:
    // and unlockForConfiguration.
    if ([captureDevice lockForConfiguration:&error]) {
        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"设置设备属性过程发生错误,错误信息:%@", error.localizedDescription);
    }
}

/**
 * Set the flash mode, when supported.
 */
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFlashModeSupported:flashMode]) {
            [captureDevice setFlashMode:flashMode];
        }
    }];
}

/**
 * Set the focus mode, when supported.
 */
- (void)setFocusMode:(AVCaptureFocusMode)focusMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 * Set the exposure mode, when supported.
 */
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
    }];
}

/**
 * Set the focus and exposure modes and their point of interest.
 */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        // FIX(review): the original checked support for the *passed* modes but
        // then set hard-coded AutoFocus/AutoExpose constants; apply the
        // parameters that were actually validated.
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
        if ([captureDevice isFocusPointOfInterestSupported]) {
            [captureDevice setFocusPointOfInterest:point];
        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        if ([captureDevice isExposurePointOfInterestSupported]) {
            [captureDevice setExposurePointOfInterest:point];
        }
    }];
}

/**
 * Install the tap gesture that drives tap-to-focus.
 */
- (void)addGenstureRecognizer {
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
    [self.viewContainer addGestureRecognizer:tapGesture];
}

- (void)tapScreen:(UITapGestureRecognizer *)tapGesture {
    CGPoint point = [tapGesture locationInView:self.viewContainer];
    // Convert the UI point into camera (device) coordinates.
    CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
    [self setFocusCursorWithPoint:point];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}

/**
 * Show the focus cursor at the given point with a pop-and-fade animation.
 */
- (void)setFocusCursorWithPoint:(CGPoint)point {
    self.focusCursor.center = point;
    self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
    self.focusCursor.alpha = 1.0;
    [UIView animateWithDuration:1.0 animations:^{
        self.focusCursor.transform = CGAffineTransformIdentity;
    } completion:^(BOOL finished) {
        self.focusCursor.alpha = 0;
    }];
}

@end
|
以上就是本文的全部内容,希望对大家的学习有所帮助,也希望大家多多支持我.
原文链接:https://blog.csdn.net/nb_token/article/details/73291935 。
最后此篇关于iOS AVCaptureSession实现视频录制功能的文章就讲到这里了,如果你想了解更多关于iOS AVCaptureSession实现视频录制功能的内容请搜索CFSDN的文章或继续浏览相关文章,希望大家以后支持我的博客! 。
IO 设备如何知道属于它的内存中的值在memory mapped IO 中发生了变化? ? 例如,假设内存地址 0 专用于保存 VGA 设备的背景颜色。当我们更改 memory[0] 中的值时,VGA
我目前正在开发一个使用Facebook sdk登录(通过FBLoginView)的iOS应用。 一切正常,除了那些拥有较旧版本的facebook的人。 当他们按下“使用Facebook登录”按钮时,他
假设我有: this - is an - example - with some - dashesNSRange将使用`rangeOfString:@“-”拾取“-”的第一个实例,但是如果我只想要最后
Card.io SDK提供以下详细信息: 卡号,有效期,月份,年份,CVV和邮政编码。 如何从此SDK获取国家名称。 - (void)userDidProvideCreditCardInfo:(Car
iOS 应用程序如何从网络服务下载图片并在安装过程中将它们安装到用户的 iOS 设备上?可能吗? 最佳答案 您无法控制应用在用户设备上的安装,因此无法在安装过程中下载其他数据。 只需在安装后首次启动应
我曾经开发过一款企业版 iOS 产品,我们公司曾将其出售给大型企业,供他们的员工使用。 该应用程序通过 AppStore 提供,企业用户获得了公司特定的配置文件(包含应用程序配置文件)以启用他们有权使
我正在尝试将 Card.io SDK 集成到我的 iOS 应用程序中。我想为 CardIO ui 做一个简单的本地化,如更改取消按钮标题或“在此保留信用卡”提示文本。 我在 github 上找到了这个
我正在使用 CardIOView 和 CardIOViewDelegate 类,没有可以设置为 YES 的 BOOL 来扫描 collectCardholderName。我可以看到它在 CardIOP
我有一个集成了通话工具包的 voip 应用程序。每次我从我的 voip 应用程序调用时,都会在 native 电话应用程序中创建一个新的最近通话记录。我在 voip 应用程序中也有自定义联系人(电话应
iOS 应用程序如何知道应用程序打开时屏幕上是否已经有键盘?应用程序运行后,它可以接收键盘显示/隐藏通知。但是,如果应用程序在分屏模式下作为辅助应用程序打开,而主应用程序已经显示键盘,则辅助应用程序不
我在模拟器中收到以下错误: ImageIO: CGImageReadSessionGetCachedImageBlockData *** CGImageReadSessionGetCachedIm
如 Apple 文档所示,可以通过 EAAccessory Framework 与经过认证的配件(由 Apple 认证)进行通信。但是我有点困惑,因为一些帖子告诉我它也可以通过 CoreBluetoo
尽管现在的调试器已经很不错了,但有时找出应用程序中正在发生的事情的最好方法仍然是古老的 NSLog。当您连接到计算机时,这样做很容易; Xcode 会帮助弹出日志查看器面板,然后就可以了。当您不在办公
在我的 iOS 应用程序中,我定义了一些兴趣点。其中一些有一个 Kontakt.io 信标的名称,它绑定(bind)到一个特定的 PoI(我的意思是通常贴在信标标签上的名称)。现在我想在附近发现信标,
我正在为警报提示创建一个 trigger.io 插件。尝试从警报提示返回数据。这是我的代码: // Prompt + (void)show_prompt:(ForgeTask*)task{
您好,我是 Apple iOS 的新手。我阅读并搜索了很多关于推送通知的文章,但我没有发现任何关于 APNS 从 io4 到 ios 6 的新更新的信息。任何人都可以向我提供 APNS 如何在 ios
UITabBar 的高度似乎在 iOS 7 和 8/9/10/11 之间发生了变化。我发布这个问题是为了让其他人轻松找到答案。 那么:在 iPhone 和 iPad 上的 iOS 8/9/10/11
我想我可以针对不同的 iOS 版本使用不同的 Storyboard。 由于 UI 的差异,我将创建下一个 Storyboard: Main_iPhone.storyboard Main_iPad.st
我正在写一些东西,我将使用设备的 iTunes 库中的一部分音轨来覆盖 2 个视频的组合,例如: AVMutableComposition* mixComposition = [[AVMutableC
我创建了一个简单的 iOS 程序,可以顺利编译并在 iPad 模拟器上运行良好。当我告诉 XCode 4 使用我连接的 iPad 设备时,无法编译相同的程序。问题似乎是当我尝试使用附加的 iPad 时
我是一名优秀的程序员,十分优秀!