
Displaying Video with AVFoundation on iOS


This post shares working code for displaying live camera video on iOS with AVFoundation. The view controller below configures an AVCaptureSession with the default video device, shows the feed through an AVCaptureVideoPreviewLayer attached to a UIView, and registers a sample-buffer delegate that can turn each captured frame into a UIImage.

//
// Capter2ViewController.m
// IosTest
//
// Created by garin on 13-7-19.
// Copyright (c) 2013 garin. All rights reserved.
//
 
#import "Capter2ViewController.h"
 
@interface Capter2ViewController ()
@end
@implementation Capter2ViewController
- (void)dealloc
{
   [session release];
   [super dealloc];
}
 
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
   self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
   if (self) {
     // Custom initialization
   }
   return self;
}
 
- (void)viewDidLoad
{
   [super viewDidLoad];
   videoPreviewView=[[UIView alloc] initWithFrame:CGRectMake(10, 10, 320, 200)];
   [self.view addSubview:videoPreviewView];
   [videoPreviewView release];
  // Do any additional setup after loading the view.
  
   // Call setupCaptureSession here (in viewDidLoad) to start displaying the camera feed
   [self setupCaptureSession];
  
//  imgView=[[UIImageView alloc] initWithFrame:CGRectMake(10, 230, 320, 100)];
//  imgView.backgroundColor=[UIColor grayColor];
//  [self.view addSubview:imgView];
//  [imgView release];
  
   UIButton *closeBtn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
   closeBtn.frame = CGRectMake(10, 220, 300, 50);
   [closeBtn setTitle:@"Press" forState:UIControlStateNormal];
   [closeBtn addTarget:self action:@selector(closeBtnClick:) forControlEvents:UIControlEventTouchUpInside];
   [self.view addSubview:closeBtn];
}
 
// Stop the capture session when the button is tapped
- (void)closeBtnClick:(id)sender
{
   [session stopRunning];
}
 
- (void)didReceiveMemoryWarning
{
   [super didReceiveMemoryWarning];
   // Dispose of any resources that can be recreated.
}
 
- (void)setupCaptureSession
{
   NSError *error = nil;
  
   // Create the session
   session = [[AVCaptureSession alloc] init];
  
   // Configure the session to produce lower resolution video frames, if your
   // processing algorithm can cope. We'll use the low-quality preset for
   // the chosen device.
   session.sessionPreset = AVCaptureSessionPresetLow;
  
   // Find a suitable AVCaptureDevice
   AVCaptureDevice *device = [AVCaptureDevice
                 defaultDeviceWithMediaType:AVMediaTypeVideo];
  
   // Create a device input with the device and add it to the session.
   AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                     error:&error];
   if (!input) {
     // Handle the error appropriately; adding a nil input to the
     // session would raise an exception, so bail out here.
     return;
   }
   [session addInput:input];
  
   // Create a VideoDataOutput and add it to the session
   AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
   [session addOutput:output];
  
   // Configure your output.
   dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
   [output setSampleBufferDelegate:self queue:queue];
   dispatch_release(queue);
  
   // Specify the pixel format
   output.videoSettings =
   [NSDictionary dictionaryWithObject:
    [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                 forKey:(id)kCVPixelBufferPixelFormatTypeKey];
  
  
   // If you wish to cap the frame rate to a known value, such as 15 fps, set
   // minFrameDuration.
   //output.minFrameDuration = CMTimeMake(1, 15);
   //AVCaptureConnection *avcaptureconn=[[AVCaptureConnection alloc] init];
   //[avcaptureconn setVideoMinFrameDuration:CMTimeMake(1, 15)];
   // Start the session running to start the flow of data
   [session startRunning];
   AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session];
   previewLayer.frame = videoPreviewView.bounds; // the UIView that hosts the video preview
   previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//  [previewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight];
   //  if(previewLayer.orientationSupported){
   //   previewLayer.orientation = mOrientation;
   //  }
  
   [videoPreviewView.layer addSublayer: previewLayer];
  
   if (![session isRunning]){
     [session startRunning];
   }
  
   // Assign session to an ivar.
   //[self setSession:session];
}
 
// Delegate callback: receives each video frame from the capture output
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     fromConnection:(AVCaptureConnection *)connection
{
   // Per-frame processing is disabled: imgView is commented out in
   // viewDidLoad. Remove this early return (and re-enable imgView)
   // to render each captured frame as a UIImage.
   return;
  
   UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
   // Display the image produced from the video frame
   imgView.image = image;
}
 
// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
   // Get a CMSampleBuffer's Core Video image buffer for the media data
   CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
   // Lock the base address of the pixel buffer
   CVPixelBufferLockBaseAddress(imageBuffer, 0);
  
   // Get the base address of the pixel buffer
   void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
  
   // Get the number of bytes per row for the pixel buffer
   size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
   // Get the pixel buffer width and height
   size_t width = CVPixelBufferGetWidth(imageBuffer);
   size_t height = CVPixelBufferGetHeight(imageBuffer);
  
   // Create a device-dependent RGB color space
   CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  
   // Create a bitmap graphics context with the sample buffer data
   CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                          bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
   // Create a Quartz image from the pixel data in the bitmap graphics context
   CGImageRef quartzImage = CGBitmapContextCreateImage(context);
   // Unlock the pixel buffer
   CVPixelBufferUnlockBaseAddress(imageBuffer,0);
  
   // Free up the context and color space
   CGContextRelease(context);
   CGColorSpaceRelease(colorSpace);
  
   // Create an image object from the Quartz image
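   // Note: imageWithCGImage: assumes UIImageOrientationUp, while the camera
   // delivers landscape-oriented frames, so the result may appear rotated;
   // imageWithCGImage:scale:orientation: can compensate if needed.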
   UIImage *image = [UIImage imageWithCGImage:quartzImage];
  
   // Release the Quartz image
   CGImageRelease(quartzImage);
  
   return (image);
}
 
@end
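
Two caveats if you try this code today. First, it uses manual retain/release (the release, autorelease, and dispatch_release calls), so it predates ARC; under ARC those calls must be removed. Second, modern iOS gates the camera behind a permission prompt: the app's Info.plist needs an NSCameraUsageDescription entry (iOS 10 and later), and access should be requested before starting capture. The snippet below is a minimal sketch of that check, assuming iOS 7 or later; setupCaptureSession is the method defined above.

// Hypothetical call site (e.g. in viewDidLoad), replacing the direct
// [self setupCaptureSession] call used above:
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                         completionHandler:^(BOOL granted) {
   // The handler may run on an arbitrary queue; hop back to the main
   // queue before touching the session or any UI.
   dispatch_async(dispatch_get_main_queue(), ^{
     if (granted) {
       [self setupCaptureSession];
     } else {
       NSLog(@"Camera access denied; no preview will be shown.");
     }
   });
}];

Note also that AVCaptureVideoPreviewLayer renders the live feed on its own; the sample-buffer delegate and imageFromSampleBuffer: are only needed when individual frames have to be processed.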

That's all for this article. I hope it helps with your study, and thanks for your support.

Original article: https://blog.csdn.net/yongyinmg/article/details/9379151

