
ios - Recording video with AVFoundation in Swift for iOS


I'm having trouble recording video with the code below. I'm working from sample code that was written for recording video.

Specifically, I can't compile this line without getting the error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'"

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self

The line is inside this IBAction function:

@IBAction func RecordButtonPressed(_ sender: Any) {

    var recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self

    var videoFileOutput = AVCaptureMovieFileOutput()
    self.captureSession.addOutput(videoFileOutput)

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let filePath = documentsURL.appendingPathComponent("temp")

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

    RecordButton.setTitle("Stop", for: .normal);
}

The rest of the code is here:

import UIKit
import AVFoundation
import Darwin

class ViewController: UIViewController {

    @IBOutlet weak var CameraView: UIImageView!
    @IBOutlet weak var RecordButton: UIButton!
    @IBOutlet weak var SelectFrButton: UIButton!
    @IBOutlet weak var ISOslider: UISlider!
    @IBOutlet weak var SSslider: UISlider!
    @IBOutlet weak var ISOtextfield: UITextField!
    @IBOutlet weak var SStextfield: UITextField!
    @IBOutlet weak var TorchSlider: UISlider!
    @IBOutlet weak var Torchtextfield: UITextField!

    var captureSession = AVCaptureSession();
    var DisplaySessionOutput = AVCaptureVideoDataOutput();
    var SaveSessionOutput = AVCaptureMovieFileOutput();
    var previewLayer = AVCaptureVideoPreviewLayer();
    var CaptureDevice: AVCaptureDevice? = nil;
    var CurrentTorchLevel: Float = 0.5;


    override func viewDidLoad() {
        super.viewDidLoad()

        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Loop through all the capture devices on this phone
        let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)

        for device in (deviceDiscoverySession?.devices)! {
            if(device.position == AVCaptureDevicePosition.back){
                do{
                    try device.lockForConfiguration()

                    device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in

                        // Set text and sliders to correct levels
                        self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
                        self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;

                        self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
                        self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);

                        self.ISOtextfield.text = device.iso.description;
                        self.ISOslider.setValue(device.iso, animated: false)

                        self.SStextfield.text = device.exposureDuration.seconds.description;
                        self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);

                        self.TorchSlider.minimumValue = 0.01;
                        self.TorchSlider.maximumValue = 1;
                        self.TorchSlider.value = 0.5;
                        self.Torchtextfield.text = "0.5";
                    })

                    // Turn torch on
                    if (device.torchMode == AVCaptureTorchMode.on) {
                        device.torchMode = AVCaptureTorchMode.off
                    } else {
                        try device.setTorchModeOnWithLevel(1.0)
                    }

                    device.unlockForConfiguration();

                    CaptureDevice = device;

                    let input = try AVCaptureDeviceInput(device: CaptureDevice)
                    if(captureSession.canAddInput(input)){
                        captureSession.addInput(input);

                        if(captureSession.canAddOutput(DisplaySessionOutput)){
                            captureSession.addOutput(DisplaySessionOutput);
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
                            CameraView.layer.addSublayer(previewLayer);
                        }
                    }
                }
                catch{
                    print("exception!");
                }
            }
        }

        CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);

        captureSession.startRunning()
    }

    // Do any additional setup after loading the view, typically from a nib.

    override func viewDidLayoutSubviews() {
        previewLayer.frame = CameraView.bounds
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }


    @IBAction func RecordButtonPressed(_ sender: Any) {

        var recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self

        var videoFileOutput = AVCaptureMovieFileOutput()
        self.captureSession.addOutput(videoFileOutput)

        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let filePath = documentsURL.appendingPathComponent("temp")

        videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

        RecordButton.setTitle("Stop", for: .normal);
    }

    @IBAction func ISOvaluechanged(_ sender: Any) {
        SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
    }

    @IBAction func SSvaluechanged(_ sender: Any) {
        let time = CMTimeMake(Int64(self.SSslider.value * 1000000), 1000000);
        SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
    }

    @IBAction func ISOtextchanged(_ sender: Any) {

    }

    @IBAction func SStextchanged(_ sender: Any) {
        //let time = CMTimeMake(Int64(exposurelevel * 100000),100000);
    }

    @IBAction func ChooseButtonPressed(_ sender: Any) {
    }

    func ShowAlert(AlertMessage: String) {
        let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)

        let OKAction = UIAlertAction(title: "OK", style: .default) { (action: UIAlertAction) in
        }
        alertController.addAction(OKAction)

        // Add the action before presenting; otherwise the alert appears without an OK button.
        self.present(alertController, animated: true, completion: nil)
    }

    @IBAction func TorchSliderChanged(_ sender: Any) {
        CurrentTorchLevel = self.TorchSlider.value;
        SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
    }

    func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {

        var newISOval = isolevel;
        var newSSval = exposurelevel;
        let newTorchVal = TorchLevel;

        if(newISOval == FLT_MAX){
            // Pass through FLT_MAX for maintaining the current ISO.
        }
        else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {
            newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
        }
        else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {
            newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
        }

        if(newSSval.timescale == 0){
            // Pass through 0,0 for maintaining the current shutter speed.
        }
        else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) {
            newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
        }
        else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {
            newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
        }

        do {
            try self.CaptureDevice?.lockForConfiguration();
            try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);

            CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in

                // Set text and sliders to correct levels
                self.ISOtextfield.text = self.CaptureDevice?.iso.description;
                self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)

                self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
                self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);

                self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
                self.Torchtextfield.text = self.CurrentTorchLevel.description;
            })

            self.CaptureDevice?.unlockForConfiguration();
        }
        catch {
            ShowAlert(AlertMessage: "Unable to set camera settings");
            self.CaptureDevice?.unlockForConfiguration();
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        return
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
        return
    }
}

Thanks for any help you can provide!

Best Answer

Make an extension of your UIViewController that conforms to AVCaptureFileOutputRecordingDelegate. Remove the last two methods from the ViewController class and add them to the extension.

class ViewController: UIViewController {
    // your methods as usual, but remove the final two methods and add them to
    // the extension that follows. Those methods are what will make you conform
    // to AVCaptureFileOutputRecordingDelegate
}

extension ViewController: AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}
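
Once the conformance is in place, the line that failed will compile and self can be passed straight to startRecording. Below is a minimal sketch of the record action under that assumption, keeping the Swift 3-era API the question uses; the "temp.mov" file name is an assumption here, added because AVCaptureMovieFileOutput writes QuickTime movies and the bare "temp" path in the question has no extension:

    @IBAction func RecordButtonPressed(_ sender: Any) {
        let videoFileOutput = AVCaptureMovieFileOutput()

        // Guard with canAddOutput: a session accepts only one movie file
        // output at a time, so a second button press would otherwise fail.
        if captureSession.canAddOutput(videoFileOutput) {
            captureSession.addOutput(videoFileOutput)
        }

        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        // Assumption: give the file a .mov extension so the container
        // format is unambiguous (the original code used a bare "temp" path).
        let fileURL = documentsURL.appendingPathComponent("temp.mov")

        // self now satisfies AVCaptureFileOutputRecordingDelegate.
        videoFileOutput.startRecording(toOutputFileURL: fileURL, recordingDelegate: self)

        RecordButton.setTitle("Stop", for: .normal)
    }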

You can achieve the same conformance by declaring it directly on your UIViewController, as shown below, but I figured I'd give you the cleaner solution above. The choice is yours.

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
    // your methods as usual, but this time you keep your final two methods

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

    }
}
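
One more note: in newer SDKs (Swift 4 / iOS 11 and later) these delegate requirements were renamed and no longer use implicitly unwrapped optionals, so the capture(_:...) signatures above will not satisfy the protocol there. A sketch of the modern conformance, assuming the same ViewController:

    extension ViewController: AVCaptureFileOutputRecordingDelegate {

        // Optional callback: the file output has started writing.
        func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        }

        // Required callback: recording finished; error is non-nil on failure.
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        }
    }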

About ios - Recording video with AVFoundation in Swift for iOS: a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/40850689/
