I'm trying to build an app similar to SnapChat. With a single button, I want to let the user both take a picture (on tap) and record a video (on long press).
I'm using AVFoundation for this. The tricky part is that I can't get it to work properly within the same AVCaptureSession. I mean, for the two capture modes I only have one preview layer, so how do I trigger the right capture depending on how the user interacts with the record button? Has anyone done something similar?
Here is a piece of my code:
import UIKit
import AVFoundation

protocol RecordCameraDelegate {
    func didSavedOutputFile(url: URL!, error: Error?)
    func didSavedImage(image: UIImage?)
}

// MARK: - Camera
class RecordCamera : NSObject {

    var videoLayer : AVCaptureVideoPreviewLayer!
    var delegate : RecordCameraDelegate!
    var capturedPhoto : UIImage?

    fileprivate var captureSession = AVCaptureSession()
    fileprivate var photoSession = AVCaptureSession()
    fileprivate var movieOutput = AVCaptureMovieFileOutput()
    fileprivate var cameraDevice : AVCaptureDevicePosition!
    fileprivate let stillImageOutput = AVCaptureStillImageOutput()

    // Devices
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .front }.first
    }()

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .back }.first
    }()

    fileprivate lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
    }()
    fileprivate var tempFilePath: URL = {
        // Use the URL's `path` for file-system checks; mixing `absoluteString`
        // ("file://...") with path-based APIs makes the existence check always fail.
        let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4")
        if FileManager.default.fileExists(atPath: tempURL.path) {
            do {
                try FileManager.default.removeItem(at: tempURL)
            } catch let error { print("Can't remove temp file: \(String(describing: error))") }
        }
        return tempURL
    }()
    // MARK: - Initialization
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) {
        super.init()
        cameraDevice = cameraPosition
        // Video
        self.configureToRecord(view: view)
        // Photo
        self.configureToCapturePhoto()
    }

    func configureToRecord(view: UIView? = nil) {
        captureSession.beginConfiguration()
        defer {
            // commit & start session
            captureSession.commitConfiguration()
            if !captureSession.isRunning { captureSession.startRunning() }
        }

        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // Start configuration
        if !captureSession.isRunning {
            // Preview layer
            if let validView = view {
                videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                videoLayer.frame = validView.bounds
                validView.layer.addSublayer(videoLayer)
            }
            // Add the camera input (front or back)
            if cameraDevice == .front {
                if let frontInput = deviceInputFrom(device: frontCameraDevice) {
                    captureSession.addInput(frontInput)
                }
            } else {
                if let backInput = deviceInputFrom(device: backCameraDevice) {
                    captureSession.addInput(backInput)
                }
            }
        }

        // Microphone input
        if let micInput = deviceInputFrom(device: micDevice) {
            captureSession.addInput(micInput)
        }

        // Output
        movieOutput.movieFragmentInterval = kCMTimeInvalid

        // Remove previous output
        if let existingOutput = captureSession.outputs.first as? AVCaptureOutput {
            captureSession.removeOutput(existingOutput)
        }

        // Add Movie Output
        if captureSession.canAddOutput(movieOutput) {
            captureSession.addOutput(movieOutput)
        }
    }
    func configureToCapturePhoto() {
        photoSession.beginConfiguration()
        defer { photoSession.commitConfiguration() }

        photoSession.sessionPreset = AVCaptureSessionPresetPhoto
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        if #available(iOS 10.0, *) {
            let cameraOutput = AVCapturePhotoOutput()
            // Add Photo Output
            if photoSession.canAddOutput(cameraOutput) {
                photoSession.addOutput(cameraOutput)
            }
        } else {
            // Add Still Image Output
            if photoSession.canAddOutput(stillImageOutput) {
                photoSession.addOutput(stillImageOutput)
            }
        }
    }
    func takePicture() {
        if #available(iOS 10.0, *) {
            guard let cameraOutput = photoSession.outputs.first as? AVCapturePhotoOutput else { return }
            // Capture settings with a small JPEG preview
            let settings = AVCapturePhotoSettings()
            let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
            let previewFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
                kCVPixelBufferWidthKey as String: 828,
                kCVPixelBufferHeightKey as String: 828
            ]
            settings.previewPhotoFormat = previewFormat
            cameraOutput.capturePhoto(with: settings, delegate: self)
        } else {
            if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
                stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in
                    let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer)
                    //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil)
                    guard let validData = imageData else { self.delegate?.didSavedImage(image: nil); return }
                    self.capturedPhoto = UIImage(data: validData)
                }
            }
        }
    }
    // MARK: - Record Methods
    func startRecording() {
        // Take picture
        print("Camera started recording")
        self.takePicture()
        // Start recording
        movieOutput.startRecording(
            toOutputFileURL: tempFilePath,
            recordingDelegate: self
        )
    }

    func stopRecording() {
        print("Camera stopped recording")
        movieOutput.stopRecording()
    }
    // MARK: - Modes
    func cameraMode() {
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Front
        if cameraDevice == .front {
            if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                }
            }
        }
        // Back
        if cameraDevice == .back {
            if let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                }
            }
        }
        print("Record Camera --> Set VIDEO Mode")
    }

    func audioMode() {
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove any camera inputs, keeping the microphone
        for input in inputs {
            if let deviceInput = input as? AVCaptureDeviceInput {
                if deviceInput.device == backCameraDevice
                    || deviceInput.device == frontCameraDevice {
                    captureSession.removeInput(deviceInput)
                }
            }
        }
        print("Record Camera --> Set AUDIO Mode")
    }
    // MARK: - Util methods
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred: \(String(describing: outError))")
            return nil
        }
    }

    func swipeCamera() {
        cameraDevice = cameraDevice == .front ? .back : .front

        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? []

        // Remove the input of the camera we are switching away from
        for input in inputs {
            if let deviceInput = input as? AVCaptureDeviceInput {
                if deviceInput.device == backCameraDevice && cameraDevice == .front {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                } else if deviceInput.device == frontCameraDevice && cameraDevice == .back {
                    captureSession.removeInput(deviceInput)
                    photoSession.removeInput(deviceInput)
                    break
                }
            }
        }

        // Front
        if cameraDevice == .front {
            if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) {
                if !inputs.contains(validFrontDevice) {
                    captureSession.addInput(validFrontDevice)
                    photoSession.addInput(validFrontDevice)
                    print("Record Camera --> Swipe to Front Camera")
                }
            }
        }
        // Back
        if cameraDevice == .back {
            if let validBackDevice = deviceInputFrom(device: backCameraDevice) {
                if !inputs.contains(validBackDevice) {
                    captureSession.addInput(validBackDevice)
                    photoSession.addInput(validBackDevice)
                    print("Record Camera --> Swipe to Back Camera")
                }
            }
        }
    }
}
// MARK: - Capture Output
extension RecordCamera : AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        // Not implemented
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        guard error == nil else {
            // Recording failed: fall back to the captured photo, if any
            if let photo = capturedPhoto {
                delegate?.didSavedImage(image: photo)
            }
            return
        }
        delegate?.didSavedOutputFile(url: outputFileURL, error: error)
    }
}

@available(iOS 10.0, *)
extension RecordCamera : AVCapturePhotoCaptureDelegate {

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) {
        print("Picture taken")
    }

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        guard error == nil else {
            print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))")
            capturedPhoto = nil
            //self.delegate.didSavedImage(image: nil)
            return
        }
        if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer,
            let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print("Photo Saved!")
            capturedPhoto = UIImage(data: imageData)
            //self.delegate.didSavedImage(image: image)
        }
    }
}
Best Answer
I built almost exactly the feature you need. I created and configured a single capture session, using the AVCaptureVideoDataOutput class for video output, the AVCaptureAudioDataOutput class for audio, and AVCaptureStillImageOutput for photos.
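For context, here is a minimal sketch of how one session with all three outputs might be wired up. It uses the same iOS 9-era API as the rest of this page; the class, property, and queue names are illustrative, not taken from the original answer:

import AVFoundation

// Sketch: one AVCaptureSession feeding video frames, audio buffers, and stills.
final class SingleSessionCamera: NSObject,
    AVCaptureVideoDataOutputSampleBufferDelegate,
    AVCaptureAudioDataOutputSampleBufferDelegate {

    let session = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    let stillImageOutput = AVCaptureStillImageOutput()
    let sampleQueue = DispatchQueue(label: "camera.samples")

    func configure() {
        session.beginConfiguration()
        defer { session.commitConfiguration() }
        session.sessionPreset = AVCaptureSessionPresetHigh

        // One camera input and one microphone input.
        for mediaType in [AVMediaTypeVideo, AVMediaTypeAudio] {
            if let device = AVCaptureDevice.defaultDevice(withMediaType: mediaType),
                let input = try? AVCaptureDeviceInput(device: device),
                session.canAddInput(input) {
                session.addInput(input)
            }
        }

        // All three outputs hang off the same session, so a single preview
        // layer built from `session` covers both photo and video capture.
        videoOutput.setSampleBufferDelegate(self, queue: sampleQueue)
        if session.canAddOutput(videoOutput) { session.addOutput(videoOutput) }

        audioOutput.setSampleBufferDelegate(self, queue: sampleQueue)
        if session.canAddOutput(audioOutput) { session.addOutput(audioOutput) }

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) { session.addOutput(stillImageOutput) }
    }

    // Both data outputs funnel into this one callback; while recording,
    // buffers are forwarded to an AVAssetWriter (see the delegate method below).
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        // no-op in this sketch
    }
}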
I used AVAssetWriter to record the video and audio, because I needed to perform custom video processing. The recording happens in the AVCaptureVideoDataOutputSampleBufferDelegate callback. That delegate method looks like this:
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    if !isRecordingVideo {
        return
    }

    if captureOutput == self.videoOutput {
        assetVideoWriterQueue.async {
            // Start the writer session at the timestamp of the first video buffer
            if self.shouldStartWritingSession {
                self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                self.shouldStartWritingSession = false
            }
            if self.assetWriterInputCamera.isReadyForMoreMediaData {
                self.assetWriterInputCamera.append(sampleBuffer)
            }
        }
    }

    if captureOutput == self.audioOutput {
        assetAudioWriterQueue.async {
            // Drop audio buffers until the writer session has started
            let shouldStartWritingSession = self.shouldStartWritingSession
            if self.assetWriterInputMicrofone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                self.assetWriterInputMicrofone.append(sampleBuffer)
            }
            if shouldStartWritingSession {
                print("In audioOutput and CANNOT Record")
            }
        }
    }
}
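The delegate method above assumes an AVAssetWriter that was prepared before recording began. A hedged sketch of that preparation, reusing the property names from the delegate code (assetWriter, assetWriterInputCamera, assetWriterInputMicrofone, shouldStartWritingSession, isRecordingVideo, and the two writer queues); the codec, dimensions, and audio settings are my assumptions, not taken from the answer:

import AVFoundation

// Writer-side state assumed by the delegate method above.
class RecorderState {
    var assetWriter: AVAssetWriter!
    var assetWriterInputCamera: AVAssetWriterInput!
    var assetWriterInputMicrofone: AVAssetWriterInput!
    var shouldStartWritingSession = false
    var isRecordingVideo = false
    let assetVideoWriterQueue = DispatchQueue(label: "writer.video")
    let assetAudioWriterQueue = DispatchQueue(label: "writer.audio")

    func prepareToRecord(to outputURL: URL) throws {
        assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

        // H.264 video input; the dimensions here are an assumption.
        assetWriterInputCamera = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: 720,
            AVVideoHeightKey: 1280
        ])
        assetWriterInputCamera.expectsMediaDataInRealTime = true

        // AAC audio input; mono at 44.1 kHz is likewise an assumption.
        assetWriterInputMicrofone = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44_100.0
        ])
        assetWriterInputMicrofone.expectsMediaDataInRealTime = true

        assetWriter.add(assetWriterInputCamera)
        assetWriter.add(assetWriterInputMicrofone)

        // startWriting() can happen up front; startSession(atSourceTime:) is
        // deferred to the first video buffer, which is exactly what
        // shouldStartWritingSession gates in the delegate method.
        if assetWriter.startWriting() {
            shouldStartWritingSession = true
            isRecordingVideo = true
        }
    }
}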
My still image capture looks like this:
func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
    guard self.state == .running else {
        completion(false, nil)
        return
    }

    backgroundQueue.async {
        let connection = self.stillImageOutput.connection(withMediaType: AVMediaTypeVideo)
        self.stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
            defer {
                self.state = .running
            }
            guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                DispatchQueue.main.async {
                    completion(false, nil)
                }
                return
            }
            let image = UIImage(data: imageData)
            DispatchQueue.main.async {
                completion(true, image)
            }
        })
    }
}
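The answer does not show how the recording is finished. Ending it in this architecture means stopping the appends, marking both writer inputs finished, and finalizing the file; here is a hedged sketch continuing the RecorderState sketch above (the method name and completion signature are mine):

// Sketch only; not part of the original answer.
func finishRecording(completion: @escaping (URL?) -> Void) {
    isRecordingVideo = false   // stop forwarding new buffers in the delegate
    assetVideoWriterQueue.async {
        self.assetWriterInputCamera.markAsFinished()
        self.assetWriterInputMicrofone.markAsFinished()
        self.assetWriter.finishWriting {
            // .completed means the file at outputURL is ready to use
            let url = self.assetWriter.status == .completed ? self.assetWriter.outputURL : nil
            DispatchQueue.main.async { completion(url) }
        }
    }
}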
You can find out how to use the asset writer on Stack Overflow; for example, you might be familiar with this.
About "ios - How to capture photo and video from the same AVCaptureSession?": we found a similar question on Stack Overflow: https://stackoverflow.com/questions/44488635/