I want to avoid the lag when my app switches between video recording and photo capture, by using only an AVCaptureMovieFileOutput and grabbing a snapshot from it when an image is captured.
Just like Snapchat does.
Is this possible? I haven't found any articles on this.
I don't want to switch between outputs, because that lags.
Code:
@IBOutlet var cameraView: UIView!
@IBOutlet var cameraSwitchButton: UIButton!
@IBOutlet var captureButtonView: CaptureButton!
@IBOutlet var cameraFlashButton: UIButton!
var captureSession = AVCaptureSession()
let movieOutput = AVCaptureMovieFileOutput()
var activeInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var outputURL: URL!
var connection : AVCaptureConnection!
override func viewDidLoad() {
    super.viewDidLoad()
    if setupSession() {
        setupPreview()
        startSession()
        connection = movieOutput.connection(with: AVMediaType.video)
        if (connection?.isVideoStabilizationSupported)! {
            connection?.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.off
        }
    }
    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(captureButtonTapped))
    let longGesture = UILongPressGestureRecognizer(target: self, action: #selector(captureButtonLongPressed))
    tapGesture.numberOfTapsRequired = 1
    captureButtonView.addGestureRecognizer(tapGesture)
    captureButtonView.addGestureRecognizer(longGesture)
}
@objc func captureButtonTapped() {
    ?? TAKE PHOTO HERE ??
}
var isRecordingVideo: Bool = false
@objc func captureButtonLongPressed(sender: UILongPressGestureRecognizer) {
    if sender.state == .began {
        isRecordingVideo = true
        startRecording()
        captureButtonView.startTimer(duration: 10.0)
    }
    if sender.state == .ended || sender.state == .failed || sender.state == .cancelled {
        captureButtonView.clear()
        isRecordingVideo = false
        stopRecording()
    }
}
func setupPreview() {
    // Configure previewLayer
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.frame = cameraView.bounds
    previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    cameraView.layer.addSublayer(previewLayer)
}
//MARK:- Setup Camera
func setupSession() -> Bool {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
    // Setup Camera
    let camera = AVCaptureDevice.default(for: AVMediaType.video)
    do {
        let input = try AVCaptureDeviceInput(device: camera!)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
            activeInput = input
        }
    } catch {
        print("Error setting device video input: \(error)")
        return false
    }
    // Setup Microphone
    let microphone = AVCaptureDevice.default(for: AVMediaType.audio)
    do {
        let micInput = try AVCaptureDeviceInput(device: microphone!)
        if captureSession.canAddInput(micInput) {
            captureSession.addInput(micInput)
        }
    } catch {
        print("Error setting device audio input: \(error)")
        return false
    }
    // Movie output
    if captureSession.canAddOutput(movieOutput) {
        captureSession.addOutput(movieOutput)
    }
    return true
}
func setupCaptureMode(_ mode: Int) {
}
//MARK:- Camera Session
func startSession() {
    if !captureSession.isRunning {
        videoQueue().async {
            self.captureSession.startRunning()
        }
    }
}
func stopSession() {
    if captureSession.isRunning {
        videoQueue().async {
            self.captureSession.stopRunning()
        }
    }
}
func videoQueue() -> DispatchQueue {
    // Note: startRunning() is a blocking call, so Apple recommends using a
    // background serial queue here rather than DispatchQueue.main.
    return DispatchQueue.main
}
func currentVideoOrientation() -> AVCaptureVideoOrientation {
    var orientation: AVCaptureVideoOrientation
    // Note: the landscape cases are intentionally mirrored; the camera's
    // notion of landscape left/right is the opposite of the device's.
    switch UIDevice.current.orientation {
    case .portrait:
        orientation = AVCaptureVideoOrientation.portrait
    case .landscapeRight:
        orientation = AVCaptureVideoOrientation.landscapeLeft
    case .portraitUpsideDown:
        orientation = AVCaptureVideoOrientation.portraitUpsideDown
    default:
        orientation = AVCaptureVideoOrientation.landscapeRight
    }
    return orientation
}
func startCapture() {
    startRecording()
}
func tempURL() -> URL? {
    let directory = NSTemporaryDirectory() as NSString
    if directory != "" {
        let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
        return URL(fileURLWithPath: path)
    }
    return nil
}
func startRecording() {
    if movieOutput.isRecording == false {
        if (connection?.isVideoOrientationSupported)! {
            connection?.videoOrientation = currentVideoOrientation()
        }
        let device = activeInput.device
        if (device.isSmoothAutoFocusSupported) {
            do {
                try device.lockForConfiguration()
                device.isSmoothAutoFocusEnabled = false
                device.unlockForConfiguration()
            } catch {
                print("Error setting configuration: \(error)")
            }
        }
        outputURL = tempURL()
        movieOutput.startRecording(to: outputURL, recordingDelegate: self)
    }
    else {
        stopRecording()
    }
}
func stopRecording() {
    if movieOutput.isRecording == true {
        movieOutput.stopRecording()
    }
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if (error != nil) {
        print("Error recording movie: \(error!.localizedDescription)")
    } else {
        UISaveVideoAtPathToSavedPhotosAlbum(outputURL.path, nil, nil, nil)
        _ = outputURL as URL
    }
    outputURL = nil
}
Best Answer
I couldn't find a way to do this using only AVCaptureMovieFileOutput, but you can add an additional photo output and trigger photo captures without switching between outputs.
I'm short on time right now, but this should get you going until I can edit in more information.
(See the EDIT below for the full implementation with limited force unwrapping.)
First, set up an additional var for a photo output in your view controller:
// declare an additional camera output var
var cameraOutput = AVCapturePhotoOutput()
// do this in your 'setupSession' func where you setup your movie output
cameraOutput.isHighResolutionCaptureEnabled = true
captureSession.addOutput(cameraOutput)
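Note: as with the movie output, it may be safer to guard this with captureSession.canAddOutput(cameraOutput) before adding it; the full implementation in the EDIT below wraps the call in exactly that check.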
Declare a function that takes a photo using the cameraOutput:
func capturePhoto() {
    // create settings for your photo capture
    let settings = AVCapturePhotoSettings()
    let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
    let previewFormat = [
        kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
        kCVPixelBufferWidthKey as String: UIScreen.main.bounds.size.width,
        kCVPixelBufferHeightKey as String: UIScreen.main.bounds.size.height
    ] as [String : Any]
    settings.previewPhotoFormat = previewFormat
    cameraOutput.capturePhoto(with: settings, delegate: self)
}
And conform to AVCapturePhotoCaptureDelegate.
I created a separate class called VideoFeed to manage the video capture session, so this example is an extension of that class. I'll update with more info later.
The loadImage(data: Data) function calls a delegate with the image. If you put this code directly in your view controller, you can ignore that call and save, or do whatever you like with, the generated photo:
extension VideoFeed: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil else {
            print("Photo Error: \(String(describing: error))")
            return
        }
        guard let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let outputData = AVCapturePhotoOutput
                .jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
                print("Oops, unable to create jpeg image")
                return
        }
        print("captured photo...")
        loadImage(data: outputData)
    }
    func loadImage(data: Data) {
        let dataProvider = CGDataProvider(data: data as CFData)
        let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
        // do whatever you like with the generated image here...
        delegate?.processVideoSnapshot(image)
    }
}
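For example, here is a minimal sketch of a loadImage replacement that simply saves the snapshot to the photo library instead of forwarding it to a delegate (this assumes the NSPhotoLibraryAddUsageDescription key shown at the end of this answer):
func loadImage(data: Data) {
    // Sketch: build the image and write it straight to the photo library.
    guard let image = UIImage(data: data) else { return }
    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}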
EDIT:
Here is the full implementation I used in my test project.
First, I moved all of the AVFoundation-specific code into its own VideoFeed class and created some callbacks to the view controller.
This separates concerns and keeps the view controller's responsibilities to a minimum.
Here is the ViewController implementation:
ViewController.swift
import UIKit
import AVFoundation
class ViewController: UIViewController, VideoFeedDelegate {
    @IBOutlet var cameraView: UIView!
    var videoFeed: VideoFeed?
    override func viewDidLoad() {
        super.viewDidLoad()
    }
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // end session
        videoFeed?.stopSession()
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // request camera access
        AVCaptureDevice.requestAccess(for: AVMediaType.video) { [weak self] granted in
            guard granted else {
                // TODO: show UI stating camera cannot be used, update in settings app...
                print("Camera access denied")
                return
            }
            DispatchQueue.main.async {
                if self?.videoFeed == nil {
                    // video access was enabled so setup video feed
                    self?.videoFeed = VideoFeed(delegate: self)
                } else {
                    // video feed already available, restart session...
                    self?.videoFeed?.startSession()
                }
            }
        }
    }
    // MARK: VideoFeedDelegate
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer) {
        // set the layer size
        layer.frame = cameraView.layer.bounds
        // add to view
        cameraView.layer.addSublayer(layer)
    }
    func processVideoSnapshot(_ image: UIImage?) {
        // validate
        guard let image = image else {
            return
        }
        // SAVE IMAGE HERE IF DESIRED
        // for now just showing in a lightbox/detail view controller
        let storyboard = UIStoryboard(name: "Main", bundle: Bundle(for: AppDelegate.self))
        let vc = storyboard.instantiateViewController(withIdentifier: "LightboxViewController") as! LightboxViewController
        vc.previewImage = image
        navigationController?.pushViewController(vc, animated: true)
    }
    @IBAction func captureButtonTapped(_ sender: Any) {
        // trigger photo capture from video feed...
        // this will trigger a callback to the function above with the captured image
        videoFeed?.capturePhoto()
    }
}
Below is the full implementation of the VideoFeed class.
Using this approach makes it easier to reuse the video functionality in other projects, without tightly coupling it to a view controller.
VideoFeed.swift
import UIKit
import AVFoundation
/// Defines callbacks associated with the VideoFeed class. Notifies delegate of significant events.
protocol VideoFeedDelegate: class {
    /// Callback triggered when the preview layer for this class has been created and configured. Conforming objects should set and maintain a strong reference to this layer otherwise it will be set to nil when the calling function finishes execution.
    ///
    /// - Parameter layer: The video preview layer associated with the active captureSession in the VideoFeed class.
    func videoFeedSetup(with layer: AVCaptureVideoPreviewLayer)
    /// Callback triggered when a snapshot of the video feed has been generated.
    ///
    /// - Parameter image: The snapshot generated from the video feed, or nil if image creation failed.
    func processVideoSnapshot(_ image: UIImage?)
}
class VideoFeed: NSObject {
    // MARK: Variables
    /// The capture session to be used in this class.
    var captureSession = AVCaptureSession()
    /// The preview layer associated with this session. This class has a
    /// weak reference to this layer, the delegate (usually a ViewController
    /// instance) should add this layer as a sublayer to its preview UIView.
    /// The delegate will have the strong reference to this preview layer.
    weak var previewLayer: AVCaptureVideoPreviewLayer?
    /// The output that handles saving the video stream to a file.
    var fileOutput: AVCaptureMovieFileOutput?
    /// A reference to the active video input
    var activeInput: AVCaptureDeviceInput?
    /// Output for capturing frame grabs of video feed
    var cameraOutput = AVCapturePhotoOutput()
    /// Delegate to receive callbacks about significant events triggered by this class.
    weak var delegate: VideoFeedDelegate?
    /// The capture connection associated with the fileOutput.
    /// Set when fileOutput is created.
    var connection: AVCaptureConnection?
    // MARK: Public accessors
    /// Public initializer. Accepts a delegate to receive callbacks with the preview layer and any snapshot images.
    ///
    /// - Parameter delegate: A reference to an object conforming to VideoFeedDelegate
    ///   to receive callbacks for significant events in this class.
    init(delegate: VideoFeedDelegate?) {
        self.delegate = delegate
        super.init()
        setupSession()
    }
    /// Public accessor to begin a capture session.
    public func startSession() {
        guard captureSession.isRunning == false else {
            return
        }
        captureSession.startRunning()
    }
    /// Public accessor to end the current capture session.
    public func stopSession() {
        // validate
        guard captureSession.isRunning else {
            return
        }
        // end file recording if the session ends and we're currently recording a video to file
        if let isRecording = fileOutput?.isRecording, isRecording {
            stopRecording()
        }
        captureSession.stopRunning()
    }
    /// Public accessor to begin file recording.
    public func startRecording() {
        guard fileOutput?.isRecording == false else {
            stopRecording()
            return
        }
        configureVideoOrientation()
        disableSmoothAutoFocus()
        guard let url = tempURL() else {
            print("Unable to start file recording, temp url generation failed.")
            return
        }
        fileOutput?.startRecording(to: url, recordingDelegate: self)
    }
    /// Public accessor to end file recording.
    public func stopRecording() {
        guard fileOutput?.isRecording == true else {
            return
        }
        fileOutput?.stopRecording()
    }
    /// Public accessor to trigger snapshot capture of video stream.
    public func capturePhoto() {
        // create settings object
        let settings = AVCapturePhotoSettings()
        // verify that we have a pixel format type available
        guard let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first else {
            print("Unable to configure photo capture settings, 'availablePreviewPhotoPixelFormatTypes' has no available options.")
            return
        }
        let screensize = UIScreen.main.bounds.size
        // setup format configuration dictionary
        let previewFormat: [String : Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: screensize.width,
            kCVPixelBufferHeightKey as String: screensize.height
        ]
        settings.previewPhotoFormat = previewFormat
        // trigger photo capture
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }
    // MARK: Setup functions
    /// Handles configuration and setup of the session, inputs, video preview layer and outputs.
    /// If all are setup and configured it starts the session.
    internal func setupSession() {
        captureSession.sessionPreset = AVCaptureSession.Preset.high
        guard setupInputs() else {
            return
        }
        setupOutputs()
        setupVideoLayer()
        startSession()
    }
    /// Sets up capture inputs for this session.
    ///
    /// - Returns: Returns true if inputs are successfully setup, else false.
    internal func setupInputs() -> Bool {
        // only need access to this functionality within this function, so declare as sub-function
        func addInput(input: AVCaptureInput) {
            guard captureSession.canAddInput(input) else {
                return
            }
            captureSession.addInput(input)
        }
        do {
            if let camera = AVCaptureDevice.default(for: AVMediaType.video) {
                let input = try AVCaptureDeviceInput(device: camera)
                addInput(input: input)
                activeInput = input
            }
            // Setup Microphone
            if let microphone = AVCaptureDevice.default(for: AVMediaType.audio) {
                let micInput = try AVCaptureDeviceInput(device: microphone)
                addInput(input: micInput)
            }
            return true
        } catch {
            print("Error setting device video input: \(error)")
            return false
        }
    }
    internal func setupOutputs() {
        // only need access to this functionality within this function, so declare as sub-function
        func addOutput(output: AVCaptureOutput) {
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
            }
        }
        // file output
        let fileOutput = AVCaptureMovieFileOutput()
        addOutput(output: fileOutput)
        // keep a reference so startRecording()/stopRecording() can reach the output
        self.fileOutput = fileOutput
        if let connection = fileOutput.connection(with: .video), connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .off
            self.connection = connection
        }
        cameraOutput.isHighResolutionCaptureEnabled = true
        addOutput(output: cameraOutput)
    }
    internal func setupVideoLayer() {
        let layer = AVCaptureVideoPreviewLayer(session: captureSession)
        layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        delegate?.videoFeedSetup(with: layer)
        previewLayer = layer
    }
    // MARK: Helper functions
    /// Creates a url in the temporary directory for file recording.
    ///
    /// - Returns: A file url if successful, else nil.
    internal func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString
        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mp4")
            return URL(fileURLWithPath: path)
        }
        return nil
    }
    /// Disables smooth autofocus functionality on the active device,
    /// if the active device is set and 'isSmoothAutoFocusSupported'
    /// is supported for the currently set active device.
    internal func disableSmoothAutoFocus() {
        guard let device = activeInput?.device, device.isSmoothAutoFocusSupported else {
            return
        }
        do {
            try device.lockForConfiguration()
            device.isSmoothAutoFocusEnabled = false
            device.unlockForConfiguration()
        } catch {
            print("Error disabling smooth autofocus: \(error)")
        }
    }
    /// Sets the current AVCaptureVideoOrientation on the currently active connection if it's supported.
    internal func configureVideoOrientation() {
        guard let connection = connection, connection.isVideoOrientationSupported,
            let currentOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.shared.statusBarOrientation.rawValue) else {
                return
        }
        connection.videoOrientation = currentOrientation
    }
}
// MARK: AVCapturePhotoCaptureDelegate
extension VideoFeed: AVCapturePhotoCaptureDelegate {
    // iOS 11+ processing
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil, let outputData = photo.fileDataRepresentation() else {
            print("Photo Error: \(String(describing: error))")
            return
        }
        print("captured photo...")
        loadImage(data: outputData)
    }
    // iOS < 11 processing
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if #available(iOS 11.0, *) {
            // nothing to do here, as iOS 11+ uses the callback above
        } else {
            guard error == nil else {
                print("Photo Error: \(String(describing: error))")
                return
            }
            guard let sampleBuffer = photoSampleBuffer,
                let previewBuffer = previewPhotoSampleBuffer,
                let outputData = AVCapturePhotoOutput
                    .jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) else {
                    print("Image creation from sample buffer/preview buffer failed.")
                    return
            }
            print("captured photo...")
            loadImage(data: outputData)
        }
    }
    /// Creates a UIImage from Data object received from AVCapturePhotoOutput
    /// delegate callback and sends to the VideoFeedDelegate for handling.
    ///
    /// - Parameter data: Image data.
    internal func loadImage(data: Data) {
        guard let dataProvider = CGDataProvider(data: data as CFData), let cgImageRef: CGImage = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: .defaultIntent) else {
            return
        }
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)
        delegate?.processVideoSnapshot(image)
    }
}
extension VideoFeed: AVCaptureFileOutputRecordingDelegate {
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("Video recording started: \(fileURL.absoluteString)")
    }
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        guard error == nil else {
            print("Error recording movie: \(String(describing: error))")
            return
        }
        UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
    }
}
For anyone else using this, don't forget to add permissions to your Info.plist for camera, photo library, and microphone access:
<key>NSCameraUsageDescription</key>
<string>Let us use your camera</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>save to images</string>
<key>NSMicrophoneUsageDescription</key>
<string>for sound in video</string>
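As a related sketch (not part of the original answer), the ViewController above only requests camera access at runtime; a similar pre-flight check for the microphone, using the same AVCaptureDevice API, might look like this:
// Sketch: check microphone authorization before relying on the audio input.
switch AVCaptureDevice.authorizationStatus(for: .audio) {
case .authorized:
    break // already granted, the audio input will work
case .notDetermined:
    AVCaptureDevice.requestAccess(for: .audio) { granted in
        print("Microphone access granted: \(granted)")
    }
default:
    print("Microphone access denied or restricted, video will record without sound")
}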
Regarding "swift - Get image from AVCaptureMovieFileOutput without switching outputs", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/51828494/