I'm using 3 videos in a composition. I can't figure out why the second one plays as black after merging them. It has the correct time and values when debugging, so I know it's there. Here is my code:
import UIKit
import AVFoundation
import MobileCoreServices
import CoreMedia
import AssetsLibrary
import MediaPlayer
import Photos
class MergeViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate, MPMediaPickerControllerDelegate {
let clipPicker = UIImagePickerController()
let audioPicker = MPMediaPickerController()
var isSelectingAsset: Int!
// video clip 1
var firstAsset: AVAsset!
// video clip 2
var secondAsset: AVAsset!
// video clip 3
var thirdAsset: AVAsset!
// sound track
var audioAsset: AVAsset!
// activity view indicator
override func viewDidLoad() {
super.viewDidLoad()
clipPicker.delegate = self
clipPicker.sourceType = UIImagePickerControllerSourceType.PhotoLibrary
clipPicker.mediaTypes = [kUTTypeMovie]
audioPicker.delegate = self
audioPicker.prompt = "Select Audio"
}
@IBAction func loadAsset1(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 1
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAsset2(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 2
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAsset3(sender: AnyObject) {
if UIImagePickerController.isSourceTypeAvailable(UIImagePickerControllerSourceType.PhotoLibrary) {
isSelectingAsset = 3
self.presentViewController(clipPicker, animated: true, completion: nil)
}
}
@IBAction func loadAudio(sender: AnyObject) {
self.presentViewController(audioPicker, animated: true, completion: nil)
}
@IBAction func mergeMedia(sender: AnyObject) {
if firstAsset != nil && secondAsset != nil && thirdAsset != nil {
// set up container to hold media tracks.
var mixComposition = AVMutableComposition()
// track times
let track1to2Time = CMTimeAdd(firstAsset.duration, secondAsset.duration)
let totalTime = CMTimeAdd(track1to2Time, thirdAsset.duration)
// create separate video tracks for individual adjustments before merge
var firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
ofTrack: firstAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)
var secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration),
ofTrack: secondAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: firstAsset.duration,
error: nil)
var thirdTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo,
preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
thirdTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, thirdAsset.duration),
ofTrack: thirdAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack,
atTime: track1to2Time,
error: nil)
// Set up an overall instructions array
var mainInstruction = AVMutableVideoCompositionInstruction()
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime)
// Create seperate instructions for each track with helper method to correct orientation.
let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
// Make sure each track becomes transparent at end for the next one to play.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)
secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)
let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack, asset: thirdAsset)
// Add individual instructions to main for execution.
mainInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]
let mainComposition = AVMutableVideoComposition()
// Add instruction composition to main composition and set frame rate to 30 per second.
mainComposition.instructions = [mainInstruction]
mainComposition.frameDuration = CMTimeMake(1, 30)
mainComposition.renderSize = CGSize(
width: UIScreen.mainScreen().bounds.width,
height: UIScreen.mainScreen().bounds.height)
// get audio
if audioAsset != nil {
let audioTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: 0)
audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, totalTime), ofTrack: audioAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack,
atTime: kCMTimeZero,
error: nil)
}
// get path
var paths: NSArray = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
let documentsDirectory: String = paths[0] as! String
let id = String(arc4random() % 1000)
let myPathDocs: String = documentsDirectory.stringByAppendingPathComponent("mergeVideo-\(id).mov")
let url = NSURL(fileURLWithPath: myPathDocs)
// make exporter
var exporter = AVAssetExportSession(
asset: mixComposition,
presetName: AVAssetExportPresetHighestQuality)
exporter.outputURL = url
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.shouldOptimizeForNetworkUse = true
exporter.videoComposition = mainComposition
exporter.exportAsynchronouslyWithCompletionHandler() {
dispatch_async(dispatch_get_main_queue(), { () -> Void in
self.exportDidFinish(exporter)
})
}
}
}
// MARK: Image Picker Methods
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject]) {
// Get the media type
let imageType: String = info[UIImagePickerControllerMediaType] as! String
let url: NSURL = info[UIImagePickerControllerMediaURL] as! NSURL
println(imageType)
// Place movie in queue.
if isSelectingAsset == 1 {
println("Success loading 1")
firstAsset = AVAsset.assetWithURL(url) as! AVAsset
} else if isSelectingAsset == 2 {
println("Success loading 2")
secondAsset = AVAsset.assetWithURL(url) as! AVAsset
} else if isSelectingAsset == 3 {
println("Success loading 3")
thirdAsset = AVAsset.assetWithURL(url) as! AVAsset
}
// Dismiss movie selection.
self.dismissViewControllerAnimated(true, completion: nil)
}
func mediaPicker(mediaPicker: MPMediaPickerController!, didPickMediaItems mediaItemCollection: MPMediaItemCollection!) {
let song: NSArray = [mediaItemCollection.items]
if song.count > 0 {
var selectedSong: MPMediaItem! = song[0][0] as! MPMediaItem
let url: NSURL = selectedSong.valueForProperty(MPMediaItemPropertyAssetURL) as! NSURL
audioAsset = AVAsset.assetWithURL(url) as! AVAsset
println("Audio loaded")
}
self.dismissViewControllerAnimated(true, completion: nil)
}
func mediaPickerDidCancel(mediaPicker: MPMediaPickerController!) {
self.dismissViewControllerAnimated(true, completion: nil)
}
// MARK: Merge Helper Methods
func exportDidFinish(session:AVAssetExportSession) {
assert(session.status == AVAssetExportSessionStatus.Completed, "Session status not completed")
if session.status == AVAssetExportSessionStatus.Completed {
let outputURL: NSURL = session.outputURL
let library = ALAssetsLibrary()
if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
library.writeVideoAtPathToSavedPhotosAlbum(outputURL, completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in
dispatch_async(dispatch_get_main_queue(), { () -> Void in
if (error != nil) {
let alert = UIAlertView(title: "Error", message: "Failed to save video.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
} else {
let alert = UIAlertView(title: "Success", message: "Video saved.", delegate: nil, cancelButtonTitle: "OK")
alert.show()
}
})
})
}
}
audioAsset = nil
firstAsset = nil
secondAsset = nil
thirdAsset = nil
}
// Identify the correct orientation for the output video based on the input.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
var assetOrientation = UIImageOrientation.Up
var isPortrait = false
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
assetOrientation = .Right
isPortrait = true
} else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
assetOrientation = .Left
isPortrait = true
} else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
assetOrientation = .Up
} else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
assetOrientation = .Down
}
return (assetOrientation, isPortrait)
}
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
// get the asset tracks current orientation
let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
var transform = assetTrack.preferredTransform
// identify the needed orientation
let assetInfo = orientationFromTransform(transform)
// find the size needed to fit the track in the screen for landscape
var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
// if it is portrait, get the size to fit the track in the screen and return instruction to scale.
if assetInfo.isPortrait {
scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
atTime: kCMTimeZero)
} else {
// If it is landscape then check for incorrect orientation and correct if needed, then return instructon to re-orient and scale.
let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
if assetInfo.orientation == .Down {
let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
let windowBounds = UIScreen.mainScreen().bounds
let yFix = assetTrack.naturalSize.height + windowBounds.height
let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
}
instruction.setTransform(concat, atTime: kCMTimeZero)
}
return instruction
}
}
Best Answer
Shouldn't you set the second track's opacity to 0.0 after the combined duration of the first and second tracks,
instead of what you have now:
secondInstruction.setOpacity(0.0, atTime: secondAsset.duration)
change it to:
secondInstruction.setOpacity(0.0, atTime: firstAsset.duration+secondAsset.duration) //(Untested, but the idea is there).
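With the original code the second track is made transparent at secondAsset.duration on the composition timeline, which is too early: the second clip's segment only starts at firstAsset.duration, so that segment renders black. Each fade-out time has to be a cumulative time on the composition timeline. A minimal, untested sketch, reusing the track1to2Time value already computed in mergeMedia:
// Fade each clip out at its cumulative end time on the composition timeline,
// so the clip stacked underneath it becomes visible.
firstInstruction.setOpacity(0.0, atTime: firstAsset.duration)
// track1to2Time == CMTimeAdd(firstAsset.duration, secondAsset.duration)
secondInstruction.setOpacity(0.0, atTime: track1to2Time)
// thirdInstruction is the last clip, so it needs no fade-out.
Note that in the Swift/SDK version this code targets, CMTime values cannot be added with +, so CMTimeAdd (or the precomputed track1to2Time) is the reliable way to build the combined duration.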
Regarding "ios - Why does the second video play as black after an AVFoundation merge?", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/32636814/