- html - 出于某种原因,IE8 对我的 Sass 文件中继承的 html5 CSS 不友好?
- JMeter 在响应断言中使用 span 标签的问题
- html - 在 :hover and :active? 上具有不同效果的 CSS 动画
- html - 相对于居中的 html 内容固定的 CSS 重复背景?
我会回答我自己的问题来分享我的经验,因为互联网上没有完整的工作代码。
iOS 设备通常以 Quicktime 格式以 .mov 文件录制视频。即使输出视频具有 AVC 基线视频编解码器和 AAC 音频编解码器,生成的文件也将位于 Quicktime 容器中。而且这些视频可能无法在 Android 设备上播放。 Apple 有 AVFoundation 类,如 AVCaptureSession 和 AVCaptureMovieFileOutput,但它们不直接支持 mp4 文件输出。如何在 mpeg4 容器中录制实际的 mp4 视频并支持 Swift 和 iOS 8?
最佳答案
首先要做的事情:这可能不是最好的解决方案,但这是一个完整的解决方案。
下面的代码使用 AVCaptureSession 捕获视频和音频,并使用 AVAssetExportSession 将其转换为 mpeg4。还有放大、缩小和切换相机功能以及权限检查。您可以以 480p 或 720p 录制。您还可以设置最小和最大帧速率来创建较小的视频。希望这可以作为完整的指南有所帮助。
注意:需要添加到 info.plist 中以请求相机和照片库权限的 key :
<key>NSCameraUsageDescription</key>
<string>Yo, this is a cam app.</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>Yo, i need to access your photos.</string>
<key>NSMicrophoneUsageDescription</key>
<string>Yo, i can't hear you</string>
代码:
import UIKit
import Photos
import AVFoundation
/// Records camera video + microphone audio with an AVCaptureSession and an
/// AVCaptureMovieFileOutput, then re-exports the QuickTime-container
/// recording into a real MPEG-4 container with AVAssetExportSession so the
/// result plays on non-Apple (e.g. Android) devices.
///
/// Written against Swift 3 era AVFoundation symbols (AVMediaTypeVideo,
/// AVCaptureDevicePosition, ...); later SDKs renamed most of these.
///
/// NOTE(review): this listing references UI members that are not declared
/// in it (btnSwitchCamera, timerTextView, switch_quality, switch_res) and a
/// ShareVideoController type — presumably IBOutlets and a view controller
/// defined elsewhere in the project; confirm before reuse.
class VideoAct: UIViewController, AVCaptureFileOutputRecordingDelegate
{
// Owns the camera/microphone inputs and the movie-file output.
let captureSession : AVCaptureSession = AVCaptureSession()
// Currently selected camera (back camera by default, see initialize()).
var captureDevice : AVCaptureDevice!
// First audio-capable device found in initialize().
var microphone : AVCaptureDevice!
// Live camera preview, installed as sublayer 0 of cameraView.
var previewLayer : AVCaptureVideoPreviewLayer!
// Writes the recording to disk (QuickTime container, despite the .mp4 name).
let videoFileOutput : AVCaptureMovieFileOutput = AVCaptureMovieFileOutput()
// Remaining recording time in seconds; counted down by duration_thing().
var duration : Int = 30
// Path of the current video file: first temp_video.mp4 (raw recording),
// then main_video.mp4 after export — see stopVideoAction().
var v_path : URL = URL(fileURLWithPath: "")
// One-second countdown timer started in takeVideoAction().
var my_timer : Timer = Timer()
// true while the front camera is selected (toggled in switch_cam()).
var cameraFront : Bool = false
// Number of cameras detected; set to 2 when a front camera is found.
var cameras_number : Int = 0
// Upper zoom bound reported by the device (overwritten in initialize()).
var max_zoom : CGFloat = 76
// All capture devices, enumerated once in initialize().
var devices : [AVCaptureDevice] = []
// Current video input attached to the session.
var captureInput : AVCaptureDeviceInput = AVCaptureDeviceInput()
// Current audio input attached to the session.
var micInput : AVCaptureDeviceInput = AVCaptureDeviceInput()
// Container view hosting the camera preview layer.
@IBOutlet weak var cameraView: UIView!
/// Requests camera permission if needed, then sets up the capture pipeline.
override func viewDidLoad()
{
super.viewDidLoad()
if (check_permissions())
{
initialize()
}
else
{
// Ask for camera access; on denial, leave this screen.
// NOTE(review): this completion handler may run off the main thread —
// confirm initialize()/dismiss are safe to call from it.
AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo, completionHandler: { (granted) in
if (granted)
{
self.initialize()
}
else
{
self.dismiss(animated: true, completion: nil)
}
})
}
}
/// Returns true when camera access has already been granted.
func check_permissions() -> Bool
{
return AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) == AVAuthorizationStatus.authorized
}
/// AVCaptureFileOutputRecordingDelegate: called when the recording finishes
/// (stopRecording(), maxRecordedDuration reached, or an error).
/// Intentionally left empty here.
@available(iOS 4.0, *)
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!)
{
//you can implement stopVideoAction here if you want
}
/// Picks the back camera and a microphone, computes the temporary output
/// path, and starts the session via beginSession().
func initialize()
{
let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
v_path = directory.appendingPathComponent("temp_video.mp4")
// We just set the extension to .mp4, but AVCaptureMovieFileOutput still
// writes a QuickTime container, which may not play on Android devices.
// It is converted to a real MPEG-4 in stopVideoAction().
self.duration = 30
devices = AVCaptureDevice.devices() as! [AVCaptureDevice]
for device in devices
{
if (device.hasMediaType(AVMediaTypeVideo))
{
if (device.position == AVCaptureDevicePosition.back)
{
// Default to the back camera.
captureDevice = device as AVCaptureDevice
}
if (device.position == AVCaptureDevicePosition.front)
{
// A front camera exists, so the device has two cameras.
cameras_number = 2
}
}
if (device.hasMediaType(AVMediaTypeAudio))
{
microphone = device as AVCaptureDevice
}
}
// NOTE(review): cameras_number starts at 0 and is only ever set to 2
// above, so this == 1 branch looks unreachable — verify the intent
// (it was presumably meant to hide the switch button on 1-camera devices).
if (cameras_number == 1)
{
//only 1 camera available
btnSwitchCamera.isHidden = true
}
if captureDevice != nil
{
beginSession()
}
// NOTE(review): if no back camera was found, captureDevice is nil here
// and this line traps — confirm that case cannot occur on target devices.
max_zoom = captureDevice.activeFormat.videoMaxZoomFactor
}
/// (Re)configures the session: attaches the video/audio inputs, locks the
/// device for configuration, applies focus/stabilization/format settings,
/// and installs the preview layer. Safe to call again after switch_cam().
func beginSession()
{
if (captureSession.isRunning)
{
captureSession.stopRunning()
}
do
{
try captureInput = AVCaptureDeviceInput(device: captureDevice)
try micInput = AVCaptureDeviceInput(device: microphone)
// Keep the device locked so the zoom / frame-rate / format changes
// below (and in zoom_in / zoom_out / set_quality_thing) are allowed;
// it is unlocked again in switch_cam() and stopVideoAction().
try captureDevice.lockForConfiguration()
}
catch
{
print("errorrrrrrrrrrr \(error)")
}
// beginConfiguration before adding inputs and applying settings; all of
// it is committed atomically by commitConfiguration() below.
captureSession.beginConfiguration()
captureSession.addInput(captureInput)
captureSession.addInput(micInput)
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
// NOTE(review): UIDeviceOrientation raw values do not all correspond to
// valid AVCaptureVideoOrientation values (e.g. faceUp/faceDown/unknown),
// so this force-unwrap can trap for those orientations — verify.
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.init(rawValue: UIDevice.current.orientation.rawValue)!
if (previewLayer.connection.isVideoStabilizationSupported)
{
previewLayer.connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
}
if (captureDevice.isSmoothAutoFocusSupported)
{
captureDevice.isSmoothAutoFocusEnabled = false
}
if (captureDevice.isFocusModeSupported(AVCaptureFocusMode.continuousAutoFocus))
{
captureDevice.focusMode = .continuousAutoFocus
}
set_preview_size_thing()
set_quality_thing()
if (captureDevice.isLowLightBoostSupported)
{
captureDevice.automaticallyEnablesLowLightBoostWhenAvailable = true
}
if (cameraView.layer.sublayers?[0] is AVCaptureVideoPreviewLayer)
{
// Remove the old preview layer to prevent preview layers stacking up
// on every camera switch.
cameraView.layer.sublayers?.remove(at: 0)
}
cameraView.layer.insertSublayer(previewLayer, at: 0)
previewLayer?.frame = cameraView.layer.frame
captureSession.commitConfiguration()
captureSession.startRunning()
}
/// Timer callback (fires once per second): updates the countdown label and
/// stops the recording when the time limit is reached.
func duration_thing()
{
// A text view on screen shows the remaining recording time.
self.duration = self.duration - 1
timerTextView.text = "remaining seconds: \(self.duration)"
timerTextView.sizeToFit()
if (self.duration == 0)
{
my_timer.invalidate()
stopVideoAction()
}
}
/// Toggles between the front and back camera and rebuilds the session.
func switch_cam()
{
captureSession.removeInput(captureInput)
captureSession.removeInput(micInput)
cameraFront = !cameraFront
// The device will be locked for configuration again inside beginSession().
captureDevice.unlockForConfiguration()
for device in devices
{
if (device.hasMediaType(AVMediaTypeVideo))
{
if (device.position == AVCaptureDevicePosition.back && !cameraFront)
{
captureDevice = device as AVCaptureDevice
}
else if (device.position == AVCaptureDevicePosition.front && cameraFront)
{
captureDevice = device as AVCaptureDevice
}
}
}
beginSession()
}
/// Multiplies the zoom factor by 1.5, clamped to 10x.
/// Requires the device to be locked for configuration (done in beginSession()).
func zoom_in()
{
// 10x zoom would be enough.
if (captureDevice.videoZoomFactor * 1.5 < 10)
{
captureDevice.videoZoomFactor = captureDevice.videoZoomFactor * 1.5
}
else
{
captureDevice.videoZoomFactor = 10
}
}
/// Multiplies the zoom factor by 0.67, clamped to 1x (no zoom).
func zoom_out()
{
if (captureDevice.videoZoomFactor * 0.67 > 1)
{
captureDevice.videoZoomFactor = captureDevice.videoZoomFactor * 0.67
}
else
{
captureDevice.videoZoomFactor = 1
}
}
/// Applies the frame-rate range chosen by the on-screen quality switch
/// (switch_quality on = 30 fps, off = the lower-quality range).
func set_quality_thing()
{
// You may not strictly need this, because the export session also has
// presets and a shouldOptimizeForNetworkUse option, but capping the frame
// rate here keeps the recorded file from being unnecessarily large.
// NOTE(review): frame DURATION is the inverse of frame RATE. With the
// switch off this sets minFrameDuration = 1/15 s and
// maxFrameDuration = 1/23 s, i.e. the minimum exceeds the maximum —
// verify the intended 15–23 fps mapping (arguments look swapped).
captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, switch_quality.isOn ? 30 : 15)
captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, switch_quality.isOn ? 30 : 23)
}
/// Applies the resolution chosen by the on-screen resolution switch
/// (switch_res on = 1280x720, off = 640x480) and pins a matching 420v
/// device format.
func set_preview_size_thing()
{
captureSession.sessionPreset = switch_res.isOn ? AVCaptureSessionPreset1280x720 : AVCaptureSessionPreset640x480
// This loop is probably unnecessary, but it makes sure the right device
// format is active by matching its textual description
// (resolution + "420v" pixel format + frame rate).
for some_format in captureDevice.formats as! [AVCaptureDeviceFormat]
{
let some_desc : String = String(describing: some_format)
if (switch_res.isOn)
{
if (some_desc.contains("1280x") && some_desc.contains("720") && some_desc.contains("420v") && some_desc.contains("30 fps"))
{
captureDevice.activeFormat = some_format
break
}
}
else
{
if (some_desc.contains("640x") && some_desc.contains("480") && some_desc.contains("420v"))
{
captureDevice.activeFormat = some_format
break
}
}
}
}
/// Starts recording to v_path (temp_video.mp4) and starts the 1 s
/// countdown timer.
func takeVideoAction()
{
// movieFragmentInterval is important !! Without disabling fragments you
// may end up with a video without audio.
videoFileOutput.movieFragmentInterval = kCMTimeInvalid
// NOTE(review): addOutput is called on every recording start and the
// output is never removed; a second start may fail because the output is
// already attached to the session — verify repeated start/stop cycles.
captureSession.addOutput(videoFileOutput)
(videoFileOutput.connections.first as! AVCaptureConnection).videoOrientation = returnedOrientation()
// Hard stop at `duration` seconds even if the user never taps stop.
videoFileOutput.maxRecordedDuration = CMTime(seconds: Double(self.duration), preferredTimescale: 1)
videoFileOutput.startRecording(toOutputFileURL: v_path, recordingDelegate: self)
// The timer updates the remaining-time label once per second.
my_timer = Timer.scheduledTimer(timeInterval: 1, target: self, selector: #selector(duration_thing), userInfo: nil, repeats: true)
}
/// Stops the recording and the session, then re-exports temp_video (a
/// QuickTime container) into main_video.mp4 (a true MPEG-4 container) and
/// segues to the share screen on success.
func stopVideoAction()
{
captureDevice.unlockForConfiguration()
videoFileOutput.stopRecording()
captureSession.stopRunning()
// Turn temp_video into an .mpeg4 (mp4) video.
// NOTE(review): the export starts right after stopRecording() without
// waiting for the didFinishRecording delegate callback — confirm the
// file is fully finalized before exporting.
let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let avAsset = AVURLAsset(url: v_path, options: nil)
// Passthrough re-muxes without re-encoding; other presets re-encode.
let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)!
exportSession.outputURL = directory.appendingPathComponent("main_video.mp4")
// Now the output is actually in an mpeg4 container.
exportSession.outputFileType = AVFileTypeMPEG4
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, avAsset.duration)
exportSession.timeRange = range
exportSession.exportAsynchronously(completionHandler: {
if (exportSession.status == AVAssetExportSessionStatus.completed)
{
// The temp video is no longer needed after exporting main_video;
// deletion failure is deliberately ignored (best effort).
do
{
try FileManager.default.removeItem(atPath: self.v_path.path)
}
catch
{
}
// v_path now points to the mp4 main_video.
self.v_path = directory.appendingPathComponent("main_video.mp4")
// NOTE(review): this completion handler may run off the main
// thread; UIKit calls like performSegue should be dispatched to
// the main queue — verify.
self.performSegue(withIdentifier: "ShareVideoController", sender: nil)
}
})
}
/// Record-button handler: toggles between start and stop.
func btn_capture_click_listener()
{
if (videoFileOutput.isRecording)
{
stopVideoAction()
}
else
{
takeVideoAction()
}
}
/// Maps the current device orientation to a capture orientation.
/// Note the left/right swap: device landscapeLeft corresponds to capture
/// landscapeRight and vice versa; everything else falls back to
/// landscapeLeft (this screen is landscape-only, see
/// supportedInterfaceOrientations).
func returnedOrientation() -> AVCaptureVideoOrientation
{
var videoOrientation: AVCaptureVideoOrientation!
let orientation = UIDevice.current.orientation
switch orientation
{
case .landscapeLeft:
videoOrientation = .landscapeRight
case .landscapeRight:
videoOrientation = .landscapeLeft
default:
videoOrientation = .landscapeLeft
}
return videoOrientation
}
/// Before showing the share screen: saves main_video.mp4 to the photo
/// library (fire-and-forget) and hands the file path to the destination.
override func prepare(for segue: UIStoryboardSegue, sender: Any?)
{
if (segue.identifier == "ShareVideoController")
{
// Make the video visible in the camera roll (main_video.mp4);
// completion/error are intentionally ignored.
PHPhotoLibrary.shared().performChanges({PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.v_path)}) { completed, error in}
let destVC : ShareVideoController = segue.destination as! ShareVideoController
// The destination uses the path to upload the video or whatever.
destVC.videoFilePath = v_path
}
}
/// This screen is locked to landscape (remove this override if unwanted).
override var supportedInterfaceOrientations: UIInterfaceOrientationMask
{
return .landscape
}
}
关于ios - 我如何使用 AVCaptureSession 和 swift (IOS 8)在 mpeg4 容器中录制 mp4 视频,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/47994220/
我正在使用以下代码播放声音,过一会儿它将停止播放声音,这是因为我相信有太多的Mediaplayer打开实例,所以我添加了一个额外的mp.release();,这只会使我的应用程序崩溃(目前已被注释掉)
我正在查看 XV-6 代码,它通过它识别 MP 结构。它首先在 EBDA 的第一个 kb 中搜索。代码是这样的 static struct mp* mpsearch(void) { uchar *
我在我的应用程序中使用 Mp 饼图。它显示非常小的尺寸,我试图增加它的尺寸但它没有增加它的尺寸。我无法找出问题所在。请告诉我们如何增加尺寸。 这是我的代码: public class MPpiecha
如何使用 MPAndroidChart 实现此目的? 使用版本:com.github.PhilJay:MPAndroidChart:v3.1.0-alpha 添加图例和饼图边距的代码: private
亲爱的社区,我面临以下问题,我正在使用此处提供的 MP android 图表库创建条形图:https://github.com/PhilJay/MPAndroidChart . 我想为我的条设置渐变背
我正在使用 SAS MP Connect 开发我的第一段代码,以运行同一个 sas 作业的并行线程。 我知道 MP CONNECT 仅受可用 CPU 数量的物理限制,但理想情况下我不想在我的工作中使用
我最近购买了在 Linux 服务器上运行的 Stata MP12(8 核)许可证。 有没有人写过 Stata 程序,比如说模拟研究来测试 Stata MP 的性能?我想监视在作业处理过程中实际使用的内
我将不胜感激任何“一步一步”指南,说明如何更改 PHP/HTML 页面上的动态数据库连接/连接字符串/等上的代码,使其“即插即用”工作通过 ftp 将页面和 MySQL 数据库托管在“Azure 网站
试图在我的应用程序中放置一个“暂停”按钮,以播放一些声音片段循环播放。 当我打电话mp.pause();一切都破了,我完全迷路了! 这是我正在使用的方法。 protected void man
我想使用 Mp Chart 创建折线图 我想要实现的是这张图片 但是到目前为止我已经得到了这个。 我使用的代码是这个 private fun setData() { val entries
通常,我可能会编写一个类似simd的循环: float * x = (float *) malloc(10 * sizeof(float)); float * y = (float *) malloc
在与堆栈空间、OpenMP 以及如何处理这些问题相关的其他帖子上,有很多回复。但是,我找不到信息来真正理解 OpenMP 调整编译器选项的原因: 原因是什么-fopenmp在 gfortran 中暗示
我有一段代码,可以根据漂移、波动性和随机数计算任意给定日期的股票价格。但是当我检查输出列表时 - 它们是算术级数,而不是几何级数(幂函数)。我共享的变量有问题吗? 代码如下: #include #i
我正在尝试在 C++11 中并行化动态编程算法使用这种方法: void buildBaseCases() { cout << "Building base cases" << endl
我正在 open MP 中实现并行点积 我有这个代码: #include #include #include #include #include #include #define SIZE
我有一台服务器已经将近 4 年了,直到现在我都没有遇到任何问题(主机端)。我一直在更换主机,因为 ddos 的东西试图找到最适合我的东西。现在我买了一个 VPS(这不是我的第一个)并尝试运行我的服
所以我有两个内部平行区域的外部平行区域。是否可以将 2 个线程放入外部平行线,将 4 个线程放入每个内部平行线?我做了这样的东西,但它似乎无法按照我想要的方式工作。有什么建议吗? start_r =
我希望有人指出我们遇到的问题或解决方法。 使用/MP 编译项目时,似乎只有同一文件夹中的文件会同时编译。我使用 Process Explorer 滑动命令行并确认行为。 项目过滤器似乎对并发编译的内容
本文整理了Java中me.chanjar.weixin.mp.api.WxMpMessageRouter类的一些代码示例,展示了WxMpMessageRouter类的具体用法。这些代码示例主要来源于G
我正在监视 Stata/MP(Stata/SE 的多核版本)的 CPU 和内存使用情况,但我不是 Stata 程序员(更像是 Perl 人)。 任何人都可以发布一些代码,利用公共(public)数据集
我是一名优秀的程序员,十分优秀!