I need to load all the videos from the documents directory, merge them into a single video, and then upload that video to a server. But this code only merges two videos. How can I merge more than two videos?
func mergeVideo(index: Int) {
    print("index: \(index)")
    let item: AVPlayerItem!
    Utility.showActivityIndicator()
    // Note: only two clips are ever loaded here (index and index + 1),
    // which is why this code merges just two videos.
    var url = String()
    var url2 = String()
    url = fileManager.appending(videoFileNames[index] + ".MOV")
    url2 = fileManager.appending(videoFileNames[index + 1] + ".MOV")
    let avAsset = AVAsset(url: NSURL(fileURLWithPath: url) as URL)
    let avAsset2 = AVAsset(url: NSURL(fileURLWithPath: url2) as URL)
    firstAsset = avAsset
    secondAsset = avAsset2
    if let firstAsset = firstAsset, let secondAsset = secondAsset {
        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()
        // 2 - Create two video tracks
        let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }
        let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), of: secondAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: firstAsset.duration)
        } catch _ {
            print("Failed to load second track")
        }
        // 2.1
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
        // 2.2
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
        firstInstruction.setOpacity(0.0, at: firstAsset.duration)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
        // 2.3
        mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
        item = AVPlayerItem(asset: mixComposition)
        item.videoComposition = mainComposition
        // 3 - Code for audio track
        /*
        if let loadedAudioAsset = audioAsset {
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
            do {
                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0],
                                               at: kCMTimeZero)
            } catch _ {
                print("Failed to load audio track")
            }
        }
        */
        // 4 - Get path
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")
        MergeURL = URL(fileURLWithPath: savePath)
        // 5 - Create exporter
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = MergeURL
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition
        // 6 - Perform the export
        exporter.exportAsynchronously {
            DispatchQueue.main.async {
                self.exportDidFinish(exporter)
            }
        }
    }
}
func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)
    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait {
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
    } else {
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
        if assetInfo.orientation == .down {
            let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(M_PI))
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: kCMTimeZero)
    }
    return instruction
}
Best answer
func mergeVideo(_ mAssetsList: [AVAsset]) {
    let mainComposition = AVMutableVideoComposition()
    var startDuration: CMTime = kCMTimeZero
    let mainInstruction = AVMutableVideoCompositionInstruction()
    let mixComposition = AVMutableComposition()
    var allVideoInstruction = [AVMutableVideoCompositionLayerInstruction]()
    var assets = mAssetsList
    var strCaption = EMPTY_STRING
    for i in 0 ..< assets.count {
        let currentAsset: AVAsset = assets[i] // Current asset.
        let currentTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try currentTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAsset.duration),
                                             of: currentAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                             at: startDuration)
            // Creates the instruction for the current video asset.
            let currentInstruction: AVMutableVideoCompositionLayerInstruction = videoCompositionInstructionForTrack(currentTrack, asset: currentAsset)
            // Fade-in effect over the first second of the clip.
            currentInstruction.setOpacityRamp(fromStartOpacity: 0.0,
                                              toEndOpacity: 1.0,
                                              timeRange: CMTimeRangeMake(startDuration, CMTimeMake(1, 1)))
            if i != assets.count - 1 {
                // Sets the fade-out effect at the end of the clip.
                currentInstruction.setOpacityRamp(fromStartOpacity: 1.0,
                                                  toEndOpacity: 0.0,
                                                  timeRange: CMTimeRangeMake(
                                                      CMTimeSubtract(CMTimeAdd(currentAsset.duration, startDuration), CMTimeMake(1, 1)),
                                                      CMTimeMake(2, 1)))
            }
            let transform: CGAffineTransform = currentTrack.preferredTransform
            if orientationFromTransform(transform).isPortrait {
                let outputSize: CGSize = CGSize(width: VIDEO_WIDTH, height: VIDEO_HEIGHT)
                let horizontalRatio = CGFloat(outputSize.width) / currentTrack.naturalSize.width
                let verticalRatio = CGFloat(outputSize.height) / currentTrack.naturalSize.height
                let scaleToFitRatio = max(horizontalRatio, verticalRatio) // scaleAspectFill
                let FirstAssetScaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                if currentAsset.g_orientation == .landscapeLeft {
                    let rotation = CGAffineTransform(rotationAngle: .pi)
                    let translateToCenter = CGAffineTransform(translationX: VIDEO_WIDTH, y: VIDEO_HEIGHT)
                    let mixedTransform = rotation.concatenating(translateToCenter)
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(FirstAssetScaleFactor).concatenating(mixedTransform), at: kCMTimeZero)
                } else {
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(FirstAssetScaleFactor), at: kCMTimeZero)
                }
            }
            allVideoInstruction.append(currentInstruction) // Add the video instruction to the instructions array.
            startDuration = CMTimeAdd(startDuration, currentAsset.duration)
        } catch _ {
            print(ERROR_LOADING_VIDEO)
        }
    }
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, startDuration)
    mainInstruction.layerInstructions = allVideoInstruction
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: 640, height: 480)
    // Create a path to store the merged video.
    let savePath = (getDocumentsDirectory() as NSString).appendingPathComponent("\(MERGED_VIDEO).mp4")
    let url = URL(fileURLWithPath: savePath)
    deleteFileAtPath(savePath)
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVEXPORT_PRESET_NAME) else { return }
    exporter.outputURL = url
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.shouldOptimizeForNetworkUse = false
    exporter.videoComposition = mainComposition
    // Perform the export.
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter)
        }
    }
}
func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.completed {
        print(session.outputURL)
    }
}
func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    return instruction
}
extension AVAsset {
    var g_size: CGSize {
        return tracks(withMediaType: AVMediaTypeVideo).first?.naturalSize ?? .zero
    }
    var g_orientation: UIInterfaceOrientation {
        guard let transform = tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform else {
            return .portrait
        }
        switch (transform.tx, transform.ty) {
        case (0, 0):
            return .landscapeRight
        case (g_size.width, g_size.height):
            return .landscapeLeft
        case (0, g_size.width):
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }
}
Create an array of AVAssets from all the videos in the documents directory and pass it to the mergeVideo function to merge them all.
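As a minimal sketch of that last step (the helper name, the .MOV-only filter, and the name-based sort order are my own assumptions, not part of the original answer):

import AVFoundation

// Hypothetical helper that builds the [AVAsset] input for mergeVideo(_:).
func loadAssetsFromDocuments() -> [AVAsset] {
    let fileManager = FileManager.default
    let documentsURL = fileManager.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let contents = (try? fileManager.contentsOfDirectory(at: documentsURL, includingPropertiesForKeys: nil)) ?? []
    // Keep only the .MOV clips and sort by name so the merge order is deterministic.
    return contents
        .filter { $0.pathExtension.lowercased() == "mov" }
        .sorted { $0.lastPathComponent < $1.lastPathComponent }
        .map { AVAsset(url: $0) }
}

// Usage:
mergeVideo(loadAssetsFromDocuments())

Filtering by extension also keeps earlier merge output out of the input: the answer's export writes its .mp4 into the same documents directory, which you would not want to re-merge.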
Regarding "ios - Merge multiple videos from a directory", a similar question was found on Stack Overflow: https://stackoverflow.com/questions/41973286/
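The question also asks about uploading the merged video to a server, which the answer does not cover. A minimal sketch, assuming a plain HTTP POST of the file body (the endpoint URL and Content-Type here are hypothetical placeholders; a real backend may require multipart/form-data instead):

import Foundation

func uploadMergedVideo(at fileURL: URL) {
    // Hypothetical endpoint; replace with the real server URL.
    guard let endpoint = URL(string: "https://example.com/upload") else { return }
    var request = URLRequest(url: endpoint)
    request.httpMethod = "POST"
    request.setValue("video/quicktime", forHTTPHeaderField: "Content-Type")
    // uploadTask(with:fromFile:) streams the file from disk instead of loading it into memory.
    let task = URLSession.shared.uploadTask(with: request, fromFile: fileURL) { _, response, error in
        if let error = error {
            print("Upload failed: \(error)")
        } else if let http = response as? HTTPURLResponse {
            print("Upload finished with HTTP status \(http.statusCode)")
        }
    }
    task.resume()
}

This could be called from exportDidFinish once session.status is .completed, passing session.outputURL.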