- html - 出于某种原因,IE8 对我的 Sass 文件中继承的 html5 CSS 不友好?
- JMeter 在响应断言中使用 span 标签的问题
- html - 在 :hover 和 :active 上具有不同效果的 CSS 动画?
- html - 相对于居中的 html 内容固定的 CSS 重复背景?
我正在做一个合并两个视频的工作,发现一个奇怪的问题,当我尝试合并一个纵向视频和一个横向视频时,输出结果以纵向模式显示视频的横向版本,这是我不希望发生的。
我很确定我在管理方向上缺少一些基本的东西。
我的代码在这里:
// Build the clip list: [clip1, clip2, clip1]. The first asset appears twice —
// it supplies the head segment (0..start_time) and the tail segment
// (start_time..end), with clip2 spliced in between.
// NOTE(review): URLWithString: yields a schemeless URL if these are plain file
// paths — fileURLWithPath: may be needed; confirm what videoBundleURL1/2 contain.
NSMutableArray *videoClipPaths=[[NSMutableArray alloc]init];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL2]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];
// Splice window boundaries, in seconds, taken from variables outside this fragment.
float start_time=startSeconds;
float end_time=endSeconds;
// One composition with a single video track and a single audio track; all
// clips are inserted into these two tracks.
AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// Running total of inserted duration (used later for the instruction time range).
__block CMTime time = kCMTimeZero;
__block CGAffineTransform translate;
__block CGSize size;
for (int i=0; i<[videoClipPaths count]; i++)
{
// The same URL is loaded twice per iteration: assetClip for the tracks,
// anAsset only for its duration/timescale.
AVURLAsset *assetClip = [AVURLAsset URLAssetWithURL:[videoClipPaths objectAtIndex:i] options:nil];
AVAssetTrack *clipVideoTrackB = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:[videoClipPaths objectAtIndex:i] options:nil];
CMTime start;
CMTime duration;
CMTimeRange video_timeRange;
if (i==0){
// Head segment: first start_time seconds of clip1, inserted at t=0.
start = CMTimeMakeWithSeconds(0.0f, anAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
// NOTE(review): error:nil silently discards insertion failures — pass an
// NSError** and check the BOOL return.
[compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];
// NOTE(review): height is used for BOTH dimensions, so renderSize is a
// square (height x height) — confirm this is intentional; a landscape
// clip rendered into this square could be the reported distortion.
size = CGSizeMake(clipVideoTrackB.naturalSize.height, clipVideoTrackB.naturalSize.height);
translate = CGAffineTransformMakeTranslation(-420, 0);
CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
// NOTE(review): setPreferredTransform: is called once per clip on the SAME
// composition track; each iteration overwrites the previous value, so only
// the LAST clip's transform survives and is later applied to the whole
// timeline (see the layer instruction below) — this is the most likely
// cause of the portrait/landscape mix-up. Per-clip transforms belong in
// per-time-range AVMutableVideoCompositionLayerInstruction setTransform:atTime:.
[compositionTrack setPreferredTransform:newTransform];
time = CMTimeAdd(time, duration);
}else if (i==1){
// Middle segment: the whole of clip2, inserted at t=start_time.
CMTime duration = anAsset.duration;
float seconds = CMTimeGetSeconds(duration);
start = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(seconds, anAsset.duration.timescale);
video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
[compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];
translate = CGAffineTransformMakeTranslation(-420, 0);
CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
[compositionTrack setPreferredTransform:newTransform];
time = CMTimeAdd(time, duration);
}
else if (i==2){
// Tail segment: clip1 from start_time to its end, appended at the end of
// the track (atTime:kCMTimeInvalid appends).
CMTime duration = anAsset.duration;
float seconds = CMTimeGetSeconds(duration);
// NOTE(review): `start` is computed from end_time here but the video
// insertion below appends via kCMTimeInvalid; `start` is only used for
// the AUDIO insertion after this if/else — confirm the audio and video
// segments actually line up.
start = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
duration = CMTimeMakeWithSeconds(seconds-start_time, anAsset.duration.timescale);
// duration = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
video_timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale),duration);
[compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:kCMTimeInvalid error:nil];
translate = CGAffineTransformMakeTranslation(-420, 0);
CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
[compositionTrack setPreferredTransform:newTransform];
time = CMTimeAdd(time, duration);
}
// video_timeRange = CMTimeRangeMake(start,duration);
//merge audio of video files
// NOTE(review): objectAtIndex:0 throws if a clip has no audio track — guard
// with tracksWithMediaType count check.
AVAssetTrack *clipVideoTrackB1 = [[assetClip tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
/*CMTime start1;
CMTime duration1;
CMTimeRange video_timeRange1;
if (i==0){
start1 = CMTimeMakeWithSeconds(0.0, anAsset.duration.timescale);
duration1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
}else if (i==1){
start1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
duration1 = CMTimeMakeWithSeconds(end_time-start_time, anAsset.duration.timescale);
}
else if (i==2){
start1 = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
duration1 = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
}
video_timeRange1 = CMTimeRangeMake(start,duration);*/
// Audio reuses the video segment's time range and `start` (uninitialized on
// some paths only if the if/else above were skipped; here one branch always runs).
[compositionTrack2 insertTimeRange:video_timeRange ofTrack:clipVideoTrackB1 atTime:start error:nil];
}
// One instruction spanning the whole merged timeline.
AVMutableVideoCompositionInstruction *vtemp = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
vtemp.timeRange = CMTimeRangeMake(kCMTimeZero, time);
NSLog(@"\nInstruction vtemp's time range is %f %f", CMTimeGetSeconds( vtemp.timeRange.start),
CMTimeGetSeconds(vtemp.timeRange.duration));
// Also tried videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack
AVMutableVideoCompositionLayerInstruction *vLayerInstruction = [AVMutableVideoCompositionLayerInstruction
videoCompositionLayerInstructionWithAssetTrack:compositionTrack];
// NOTE(review): compositionTrack.preferredTransform is whatever the LAST loop
// iteration set — a single transform applied from t=0 for the entire merged
// video. Mixed-orientation clips need one setTransform:atTime: per clip range.
[vLayerInstruction setTransform:compositionTrack.preferredTransform atTime:kCMTimeZero];
vtemp.layerInstructions = @[vLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = size;
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.instructions = @[vtemp];
// NOTE(review): within the visible code, videoComposition is never assigned to
// exporter.videoComposition — without that assignment the instructions and
// renderSize above have no effect on the export; confirm against the full method.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSParameterAssert(exporter != nil);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsPath = [paths objectAtIndex:0]; //Get the docs directory
documentsPath=[documentsPath stringByAppendingString:@"/MergeVideos"];
最佳答案
我有一个 Swift 版本,它运行得很完美——经过几个小时的编码终于让它正常工作了。很抱歉用 Swift 发布答案,希望对您有帮助。readyVideoURLs
是包含所有视频 URL 的数组。您可以使用此代码合并任意数量的视频。
我正在制作方形大小的视频。mainComposition.renderSize = CGSize(width: 600, height: 600)
/// Merges every video in `readyVideoURLs`, back to back, into a single
/// 600x600 video written to Documents/finalDailyVideo.mp4, then hands the
/// export session to `saveToAlbum`.
func mergeVideos() {
    let composition = AVMutableComposition()
    let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    // Running end time of the track — each clip is appended here.
    var current = kCMTimeZero
    var assetArr = [AVURLAsset]()
    let mainInstruction = AVMutableVideoCompositionInstruction()
    for url in readyVideoURLs {
        assetArr.append(AVURLAsset(url: url))
        print("readyVideoURL vid url:- \(url)")
    }
    for asset in assetArr {
        do {
            // FIX: append each clip at `current` (the running end of the track),
            // not at kCMTimeZero — inserting every clip at zero prepends it,
            // producing the clips in reverse order.
            try compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: current)
        } catch let error {
            print(error.localizedDescription)
        }
        // Per-clip layer instruction carries that clip's orientation transform.
        let instruction = videoCompositionInstructionForTrack(track: compositionTrack, asset: asset)
        mainInstruction.layerInstructions.append(instruction)
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(current, asset.duration))
        current = CMTimeAdd(current, asset.duration)
    }
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: 600, height: 600)
    //add audio track
    // if let loadedAudioAsset = audioAsset {
    // let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
    // do {
    // try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, current),
    // of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
    // } catch _ {
    // print("Failed to load Audio track")
    // }
    // }
    let finalVideoPath = NSHomeDirectory().appending("/Documents/finalDailyVideo.mp4")
    if FileManager.default.fileExists(atPath: finalVideoPath) {
        do {
            try FileManager.default.removeItem(atPath: finalVideoPath)
        } catch let error {
            print(error.localizedDescription)
        }
    }
    guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = URL(fileURLWithPath: finalVideoPath)
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.shouldOptimizeForNetworkUse = true
    // FIX: attach the video composition — without this the render size and the
    // orientation-fixing layer instructions are silently ignored by the export.
    exporter.videoComposition = mainComposition
    // FIX: URL(string:) on a bare path yields a schemeless, non-file URL;
    // use a file URL, matching exporter.outputURL above.
    saveToAlbum(mergedVidUrl: URL(fileURLWithPath: finalVideoPath), exporter: exporter)
}
/// Exports the merged video via `exportFinalVideo` and, on success, copies the
/// resulting file into the Saved Photos album, confirming with an alert.
func saveToAlbum(mergedVidUrl : URL, exporter : AVAssetExportSession) {
    exportFinalVideo(exporter: exporter) { didFinish in
        guard didFinish else { return }
        let library = ALAssetsLibrary()
        library.writeVideoAtPath(toSavedPhotosAlbum: mergedVidUrl) { _, writeError in
            guard writeError == nil else { return }
            DispatchQueue.main.async {
                self.showAlert(title: "Congrats..", message: "Your daily Vlog was saved in albums", actionTitle: "Got it")
            }
        }
    }
}
/// Runs the export session and reports success or failure via `completion`.
/// The `exportAsynchronously` callback fires once, on a background queue,
/// when the export has finished, failed, or been cancelled.
func exportFinalVideo(exporter: AVAssetExportSession, completion:@escaping(Bool) -> ()) {
    exporter.exportAsynchronously() {
        switch exporter.status {
        case .completed:
            print("merged video exporting DONE")
            DispatchQueue.main.async {
                self.dismiss(animated: true, completion: nil)
            }
            completion(true)
        case .failed, .cancelled:
            // FIX: also handle .cancelled so the caller always receives a
            // callback, and present the alert on the main thread — this
            // closure runs on a background queue.
            completion(false)
            DispatchQueue.main.async {
                self.showAlert(title: "Oops!", message: "Something went wrong. Video could not be created.", actionTitle: "Okay!")
            }
        default:
            // .exporting / .waiting never reach this completion callback;
            // kept only as a defensive trace.
            print("EXPORTING...")
        }
    }
}
/// Builds a layer instruction that applies `asset`'s preferred (orientation)
/// transform and scales the clip so its on-screen width fills the screen.
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let preferred = videoTrack.preferredTransform
    let info = orientationFromTransform(transform: preferred)
    let screenWidth = UIScreen.main.bounds.width
    if info.isPortrait {
        // Portrait footage is stored rotated: its on-screen width corresponds
        // to naturalSize.height.
        let ratio = screenWidth / videoTrack.naturalSize.height
        let scale = CGAffineTransform(scaleX: ratio, y: ratio)
        layerInstruction.setTransform(preferred.concatenating(scale), at: kCMTimeZero)
    } else {
        let ratio = screenWidth / videoTrack.naturalSize.width
        let scale = CGAffineTransform(scaleX: ratio, y: ratio)
        // Landscape: scale, then nudge down by half the screen width to centre.
        var finalTransform = preferred
            .concatenating(scale)
            .concatenating(CGAffineTransform(translationX: 0, y: screenWidth / 2))
        if info.orientation == .down {
            // Upside-down footage: rotate 180 degrees, re-centre, then scale.
            let flip = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
            let windowBounds = UIScreen.main.bounds
            let recenter = CGAffineTransform(translationX: videoTrack.naturalSize.width,
                                             y: videoTrack.naturalSize.height + windowBounds.height)
            finalTransform = flip.concatenating(recenter).concatenating(scale)
        }
        layerInstruction.setTransform(finalTransform, at: kCMTimeZero)
    }
    return layerInstruction
}
/// Maps a video track's preferred transform matrix to an image orientation
/// plus a flag for whether the footage is portrait.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    switch (transform.a, transform.b, transform.c, transform.d) {
    case (0, 1.0, -1.0, 0):
        // Rotated 90 degrees clockwise — portrait.
        return (.right, true)
    case (0, -1.0, 1.0, 0):
        // Rotated 90 degrees counter-clockwise — portrait.
        return (.left, true)
    case (1.0, 0, 0, 1.0):
        // Identity — upright landscape.
        return (.up, false)
    case (-1.0, 0, 0, -1.0):
        // Rotated 180 degrees — upside-down landscape.
        return (.down, false)
    default:
        // Any other matrix: treat as upright landscape (same as original fallthrough).
        return (.up, false)
    }
}
关于ios - 在 ios 应用程序中合并两个视频仍然保持每个视频的方向?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/25104232/
每次我尝试构建(执行完整的 Clean,然后构建)时,我都会在 Eclipse 的问题部分下弹出此错误消息。项目本身旁边还显示一个错误。 我已经尝试了同一问题的其他解决方案中包含的所有内容: 删除项目
我收到以下错误(注意:我使用的是 Netbeans): java.sql.SQLException: No suitable driver found for jdbc:derby://localho
例如 //somewhere struct IFace; struct Base { Base(IFace* iface): f(iface) { //wi
我试图通过 stringstream 将 double 变成字符串,但它不起作用。 std::string MatlabPlotter::getTimeVector( unsigned int xve
我正在尝试使用 AudioKit 框架中的音序器播放音频文件。 AudioKit.output = sampler AudioKit.start() sampler.enableMID
昨天我问了一个关于插入 Heroku 的问题。它不工作,然后突然开始工作。我什么都没改变。现在在一个新的应用程序上,我遇到了完全相同的问题。我决定包含我的整个 Gemfile,希望我可以继续没有这种令
我知道,这个topic已经是discussed许多times,所以直截了当。 这是ItemsSource的TabControl: Tabs = new ObservableCollection {
我有一个更新对象的函数,问题是当我从更新表单字段返回到详细 View 时,它初始化旧对象而不是更新后的对象。 我想在 CarService 而不是 app.js 中填充汽车列表 这是我的汽车服务:
在 resolution comments错误报告 12266 (“套接字连接错误导致资源泄漏”),Robert Ehteshamzadeh 写道 TClientSocket is deprecate
我最初发布了一个问题 here 我发现 JTextField 仅在 JScrollPane 存在时才调整大小。换句话说,我可以根据需要最小化和最大化它,直到出现滚动条(因为文本太多,无法放入窗口)。之
我读过关于 postion:absolute 的问题并尝试了几乎所有可能的解决方案。包括相对定位 div,将它们包装在相对定位的父级中等等,但它没有帮助。 我正在绘制一个表格,然后我将 div 放入其
我在这里发起了一个话题document.getElementById not working但看起来即使提出的建议都是有效的,我仍然有问题。 我有几个复选框。当我在这里查看页面源代码时,有。 docu
我正在做一些阅读,试图更好地理解按位运算符,然后偶然发现了 a helpful old blog post from 2012 ,其中指出 - 在随机正整数 x 的奇数测试中 - 在作者的计算机上评估
我正在尝试在 Eclipse Neon 中使用 aspectj 创建一个示例 maven 项目。然而,方面并没有编织/工作(参见下面的输出)。我尝试寻找很多原因和解决方案,但没有一个有效(请参阅下面的
无论我如何配置我的 appsettings.json 和 appsettings.Development.json,除非我手动添加 ConfigureLogging,否则我无法在信息消息下方记录任何内
我正在尝试使用 JQuery .get() 方法和 JavaScript for 循环来处理来自外部文件的一些数据。我已经在 stackoverflow 上阅读了有关闭包和回调返回值的内容几个小时,但
我正在使用 PHP 5.6 并且要打印一些东西,我必须编辑 php.ini 并包含 php_printer.dll 文件。但是 PHP 5.6 没有.dll 文件。 我要解决的问题: 我想将凭证打印机
我目前正在调试一个包含内存泄漏的大(非常大!)C# 应用程序。它主要使用 Winforms 作为 GUI,尽管一些控件是在 WPF 中制作的,并由 ElementHost 托管。直到现在,我发现许多内
[已解决] 看来 PHP MYADMIN 变量成功了。我将 wait_timeout 设置为 30 ,并将 Lock_wait_timeout 设置为 50 花了将近 6 个小时才恢复稳定,包括几次重
我读过几个关于该主题的讨论,有人说 qmake < 3.0 不正确支持该指令。我刚刚为 g++-64 重新安装了 Qt 5.9.1,但问题仍然存在。此外,我尝试过各种 mkspecs/xxx/xxx.
我是一名优秀的程序员,十分优秀!