ios - AVMutableComposition y-axis inverted

I am adding a watermark to a video. I set the watermark layer's origin to (10, 10), but after exporting, the watermark ends up in the bottom-left corner. If I increase y, the watermark moves up, as if the y-axis increased from bottom to top.

Can anyone suggest a fix?

NSString *path = [[NSBundle mainBundle] pathForResource:@"video" ofType:@"mov"];
NSURL *file = [NSURL fileURLWithPath:path];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:file options:nil];
self.asset = videoAsset;

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

if (clipAudioTrack) {
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:clipAudioTrack
                                    atTime:kCMTimeZero
                                     error:nil];
}
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:clipVideoTrack
                                atTime:kCMTimeZero
                                 error:nil];
[compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

CGSize videoSize = [clipVideoTrack naturalSize];
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);

UIImage *myImage = [UIImage imageNamed:@"watermark"];
CALayer *aLayer = [CALayer layer];
CGRect frame = CGRectMake(10, 10, CGImageGetWidth(myImage.CGImage), CGImageGetHeight(myImage.CGImage));
aLayer.frame = frame;
aLayer.contents = (id)myImage.CGImage;

[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
videoComposition.renderSize = videoSize;
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
instruction.layerInstructions = @[layerInstruction];
videoComposition.instructions = @[instruction];

NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
exportSession.videoComposition = videoComposition;
exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export OK");
            if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, nil, nil);
            }
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export Cancelled");
            break;
        default:
            break;
    }
}];
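
For context, the layer tree handed to AVVideoCompositionCoreAnimationTool is rendered with Core Animation's default bottom-left origin rather than UIKit's top-left origin, which is why a frame origin of (10, 10) lands in the bottom-left corner. A minimal sketch of a manual fix, reusing the videoSize, myImage, and aLayer variables from the code above, is to flip the y value yourself:

// Desired position: 10 pt from the top and left edges (UIKit-style, top-left origin).
CGFloat watermarkWidth = CGImageGetWidth(myImage.CGImage);
CGFloat watermarkHeight = CGImageGetHeight(myImage.CGImage);
// Convert to the bottom-left-origin space used when the composition is rendered.
CGFloat flippedY = videoSize.height - watermarkHeight - 10;
aLayer.frame = CGRectMake(10, flippedY, watermarkWidth, watermarkHeight);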

Best Answer

I had a lot of trouble in the past finding the right anchor point for adding a watermark, so I'll post my code. Keep in mind that it produces a video with the watermark in the bottom-right corner (with a 5-pixel margin).

Note:

logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width / 2) + 5),
                                 finalSize.height - ((logoImageView.frame.size.height / 2) + 5),
                                 logoImageView.frame.size.width / 2,
                                 logoImageView.frame.size.height / 2)

Final code:
final class QUWatermarkManager {

    static func watermark(video videoAsset: AVAsset, imageLayer: CALayer, saveToLibrary flag: Bool, completion: ((status: AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL: NSURL!) -> ())?) {

        var finalSize = CGSizeMake(imageLayer.frame.size.width, imageLayer.frame.size.width)
        var computedVideoSize = finalSize.width
        while (computedVideoSize % 16 > 0) { // find the right resolution that can be divided by 16
            computedVideoSize++
        }
        finalSize = CGSizeMake(computedVideoSize, computedVideoSize)

        let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
        let videoSize = clipVideoTrack.naturalSize
        let scale = finalSize.width / videoSize.width

        let composition = AVMutableComposition()
        composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = CGSizeMake(finalSize.width, finalSize.width)
        videoComposition.frameDuration = CMTimeMake(1, 30)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
        let t4 = CGAffineTransformScale(clipVideoTrack.preferredTransform, scale, scale)
        layerInstruction.setTransform(t4, atTime: kCMTimeZero)
        instruction.layerInstructions = [layerInstruction]
        videoComposition.instructions = [instruction]

        // Parent layer
        let parentLayer = CALayer()
        parentLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
        parentLayer.geometryFlipped = true

        // Video layer
        let videoLayer = CALayer()
        videoLayer.frame = CGRectMake(0, 0, finalSize.width, finalSize.height)
        videoLayer.geometryFlipped = true

        let logoImageView = UIImageView(image: UIImage(named: "logo_nav2"))
        logoImageView.alpha = 0.5
        logoImageView.contentMode = .ScaleAspectFit
        logoImageView.frame = CGRectMake(finalSize.width - ((logoImageView.frame.size.width / 2) + 5),
                                         finalSize.height - ((logoImageView.frame.size.height / 2) + 5),
                                         logoImageView.frame.size.width / 2,
                                         logoImageView.frame.size.height / 2)

        parentLayer.addSublayer(videoLayer)
        parentLayer.addSublayer(imageLayer)
        parentLayer.addSublayer(logoImageView.layer)

        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

        // 4 - Get path
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
        let dateFormatter = NSDateFormatter()
        dateFormatter.dateStyle = .LongStyle
        dateFormatter.timeStyle = .FullStyle
        let date = dateFormatter.stringFromDate(NSDate())
        let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
        let url = NSURL(fileURLWithPath: savePath)

        let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
        exporter.videoComposition = videoComposition
        exporter.outputURL = url
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.canPerformMultiplePassesOverSourceMediaData = true

        exporter.exportAsynchronouslyWithCompletionHandler() {
            dispatch_async(dispatch_get_main_queue(), { () -> Void in
                if exporter.status == AVAssetExportSessionStatus.Completed {
                    let outputURL = exporter.outputURL
                    if flag {
                        // Save to library
                        let library = ALAssetsLibrary()
                        if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                            library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                                completionBlock: { (assetURL: NSURL!, error: NSError!) -> Void in
                                    completion!(status: .Completed, session: exporter, outputURL: outputURL)
                            })
                        }
                    } else {
                        // Don't save to library
                        completion!(status: .Completed, session: exporter, outputURL: outputURL)
                    }
                } else {
                    // Error
                    completion!(status: exporter.status, session: exporter, outputURL: exporter.outputURL)
                }
            })
        }
    }
}
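
The line that actually addresses the y-axis question is parentLayer.geometryFlipped = true (mirrored on videoLayer): flipping the geometry makes the layer tree use a top-left origin, so frames can be specified the same way as in UIKit. Transplanted back into the question's Objective-C code, a minimal sketch (assuming the same parentLayer, videoLayer, aLayer, and myImage variables) would be:

// Mirror the answer: flip the layer tree so y increases downward, as in UIKit.
parentLayer.geometryFlipped = YES;
videoLayer.geometryFlipped = YES;
// With flipped geometry, (10, 10) is measured from the top-left corner of the video.
aLayer.frame = CGRectMake(10, 10,
                          CGImageGetWidth(myImage.CGImage),
                          CGImageGetHeight(myImage.CGImage));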

Regarding "ios - AVMutableComposition y-axis inverted", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/38960520/
