
ios - Writing a CATextLayer overlay into an AVCaptureSession recording before it is written to disk


I'm trying to write a timecode, in the form of a CATextLayer, into the video while recording it to disk via an AVCaptureSession. Here is my code so far; I haven't been able to find anything in the documentation, and Google hasn't turned up anything showing how this can be done.

I originally accomplished this with GPUImage, but the code was unstable and crashed. The library's author confirmed that GPUImage cannot currently be used reliably for this purpose.
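For reference, the overlay I'm after is just a timecode drawn by a CATextLayer along these lines. This is an illustrative sketch only; the helper name, frame, font, and string are placeholders and not taken from the capture code below:

#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>

// Illustrative only: the kind of CATextLayer timecode overlay being discussed.
static CATextLayer *MakeTimecodeLayer(void)
{
    CATextLayer *timecodeLayer = [CATextLayer layer];
    timecodeLayer.frame = CGRectMake(10.0, 10.0, 200.0, 30.0);    // placeholder position/size
    timecodeLayer.font = (__bridge CFTypeRef)@"Courier";          // font can be given by name
    timecodeLayer.fontSize = 24.0;
    timecodeLayer.foregroundColor = [UIColor whiteColor].CGColor;
    timecodeLayer.alignmentMode = kCAAlignmentLeft;
    timecodeLayer.contentsScale = [UIScreen mainScreen].scale;    // keep text crisp on Retina screens
    timecodeLayer.string = @"00:00:00:00";                        // updated every frame with the current timecode
    return timecodeLayer;
}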

CaptureSessionManager.h

#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>


@interface CaptureSessionManager : NSObject

@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureMovieFileOutput *captureOutput;
@property (nonatomic, strong) AVCaptureDeviceInput *videoIn;

- (void)addVideoPreviewLayer;
- (void)addVideoInput;
- (void)addVideoOutput;
- (void)toggleDeviceCamera;

- (void)toggleRecording;

@end

CaptureSessionManager.m

#import "CaptureSessionManager.h"
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <QuartzCore/QuartzCore.h>
#import <CoreVideo/CoreVideo.h>

#define CAPTURE_FRAMES_PER_SECOND 20

@interface CaptureSessionManager () <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate> {
    BOOL isRecording;
}
@end

@implementation CaptureSessionManager
@synthesize captureSession;
@synthesize previewLayer;
@synthesize captureOutput;
@synthesize videoIn;

#pragma mark Capture Session Configuration

- (id)init {
    if ((self = [super init])) {
        [self setCaptureSession:[[AVCaptureSession alloc] init]];
    }
    return self;
}

- (void)addVideoPreviewLayer {
    // Note: no autorelease here; the properties are declared strong, so the file is compiled under ARC.
    [self setPreviewLayer:[[AVCaptureVideoPreviewLayer alloc] initWithSession:[self captureSession]]];
    [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
}

- (void)addVideoInput {
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoDevice) {
        NSError *error;
        if ([videoDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] &&
            [videoDevice lockForConfiguration:&error]) {
            [videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
            [videoDevice unlockForConfiguration];
        }
        videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (videoIn) { // check the returned input rather than the NSError
            if ([[self captureSession] canAddInput:videoIn]) {
                [[self captureSession] addInput:videoIn];
            } else {
                NSLog(@"Couldn't add video input");
            }
        } else {
            NSLog(@"Couldn't create video input");
        }
    } else {
        NSLog(@"Couldn't create video capture device");
    }
}

- (void)addVideoOutput {
    // Add the movie file output
    NSLog(@"Adding movie file output");
    captureOutput = [[AVCaptureMovieFileOutput alloc] init];

    Float64 totalSeconds = 60;          // Maximum recording length in seconds
    int32_t preferredTimeScale = 30;    // Frames per second
    CMTime maxDuration = CMTimeMakeWithSeconds(totalSeconds, preferredTimeScale);
    captureOutput.maxRecordedDuration = maxDuration;
    captureOutput.minFreeDiskSpaceLimit = 1024 * 1024; // Minimum free space (bytes) required for recording to continue

    if ([self.captureSession canAddOutput:captureOutput])
        [self.captureSession addOutput:captureOutput];

    // Set the connection properties (also has to be done again after switching cameras)
    [self CameraSetOutputProperties];

    [self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
}

- (void)CameraSetOutputProperties
{
    // Set the connection (output) properties; connection-level settings such as video orientation would go here.
    AVCaptureConnection *captureConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
    (void)captureConnection; // currently unused
}

- (void)toggleDeviceCamera
{
    // Only do this if the device has more than one camera
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1)
    {
        NSLog(@"Toggle camera");
        NSError *error;
        AVCaptureDeviceInput *newVideoInput;
        AVCaptureDevicePosition position = [[videoIn device] position];
        if (position == AVCaptureDevicePositionBack)
        {
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionFront] error:&error];
        }
        else if (position == AVCaptureDevicePositionFront)
        {
            newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionBack] error:&error];
        }

        if (newVideoInput != nil)
        {
            // Begin the configuration change; commitConfiguration applies it atomically.
            [self.captureSession beginConfiguration];
            [self.captureSession removeInput:videoIn];
            if ([self.captureSession canAddInput:newVideoInput])
            {
                [self.captureSession addInput:newVideoInput];
                videoIn = newVideoInput;
            }
            else
            {
                // Fall back to the previous input if the new one can't be added
                [self.captureSession addInput:videoIn];
            }

            // Set the connection properties again
            [self CameraSetOutputProperties];

            [self.captureSession commitConfiguration];
        }
    }
}

//********** START STOP RECORDING BUTTON **********
- (void)toggleRecording {

    if (!isRecording)
    {
        //----- START RECORDING -----
        NSLog(@"START RECORDING");
        isRecording = YES;

        // Create a temporary URL to record to
        NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
        NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:outputPath])
        {
            NSError *error;
            if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
            {
                // Error - handle if required
            }
        }
        // Start recording
        [captureOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
    }
    else
    {
        //----- STOP RECORDING -----
        NSLog(@"STOP RECORDING");
        isRecording = NO;

        [captureOutput stopRecording];
    }
}

- (AVCaptureDevice *)CameraWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if ([device position] == position)
        {
            return device;
        }
    }
    return nil;
}

// Sample-buffer delegate callback: this is where each frame would be available for overlay compositing,
// but it is never called here because no AVCaptureVideoDataOutput has been added to the session.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"a"); // debug placeholder
}

//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter");

    BOOL recordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        // A problem occurred: find out if the recording was still successful.
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
        {
            recordedSuccessfully = [value boolValue];
        }
    }
    if (recordedSuccessfully)
    {
        //----- RECORDED SUCCESSFULLY -----
        NSLog(@"didFinishRecordingToOutputFileAtURL - success");
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                        completionBlock:^(NSURL *assetURL, NSError *error)
             {
                 if (error)
                 {
                     // Error - handle if required
                 }
             }];
        }
    }
}

- (void)dealloc {
    // Under ARC, [super dealloc] must not be called; nothing to release manually here.
}

@end

Best Answer

OK, I was able to arrive at a solution, and I sincerely hope it helps someone. For a basic AVFoundation setup that records video and audio and writes to disk, download Apple's RosyWriter sample from https://developer.apple.com/library/prerelease/ios/samplecode/RosyWriter/RosyWriter.zip

After that, just hook into one of the provided renderers. I strongly recommend the OpenGL renderer. You will need to beef up the UIView drawing, but that gets you 90% of the way there; a sketch of the per-frame compositing step is shown below.
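To make this concrete, here is a minimal sketch (not from RosyWriter itself) of the per-frame compositing step that such a pipeline enables. Note that this requires switching from AVCaptureMovieFileOutput to an AVCaptureVideoDataOutput + AVAssetWriter pipeline, which is what RosyWriter uses: inside the sample-buffer callback, render the CATextLayer overlay directly into the frame's pixel buffer before the frame is appended to the writer. It assumes the session is configured to deliver BGRA frames (kCVPixelFormatType_32BGRA); the helper name and the timecodeLayer argument are placeholders.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>
#import <QuartzCore/QuartzCore.h>

// Sketch: draw a timecode overlay into a captured frame's pixel buffer in place.
static void DrawTimecodeIntoSampleBuffer(CMSampleBufferRef sampleBuffer, CATextLayer *timecodeLayer)
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) return;

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Wrap the BGRA pixel data in a Core Graphics context so CALayer content can be rendered into it.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer),
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    if (context) {
        // Note: depending on the buffer's orientation you may need to flip the context
        // (CGContextTranslateCTM / CGContextScaleCTM) before rendering.
        // Render the overlay layer on top of the camera frame already in the buffer.
        [timecodeLayer renderInContext:context];
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpace);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

A frame modified this way is then appended through an AVAssetWriterInput / pixel buffer adaptor, which is essentially what RosyWriter's renderers do with their processed output.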

Regarding "ios - Writing a CATextLayer overlay into an AVCaptureSession recording before it is written to disk", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/30511777/
