ios - Video animation in Swift 4

I'm trying to understand the animation added to a video. What animation is happening here? I already know how to add an animation to the whole video, but I'd like to know how to add a different animation to each image in the video.

I'm using the following code to pick the images and merge them into a video.

class ImageVideoMakerController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        playPauseBtn.isHidden = true
        fromPlayVideo = true
        fromSave = false
        setUpInitialView()
    }

    func setUpInitialView() {
        setUpArrays()
        buildVideoFromImageArray()
        transitionScrollViewCreation()
        filterScrollContents()
    }

    @objc func filterActionTapped(sender: UIButton) {
        fromFilter = true
        fromTransition = false

        // Each filter button's tag indexes into this list of Core Image filter names.
        let filterNames = ["CISepiaTone", "CIPhotoEffectChrome", "CIPhotoEffectTransfer",
                           "CIPhotoEffectTonal", "CIPhotoEffectProcess", "CIPhotoEffectNoir",
                           "CIPhotoEffectInstant", "CIPhotoEffectFade"]
        guard sender.tag >= 0, sender.tag < filterNames.count else { return }

        player.pause()
        player.seek(to: kCMTimeZero)
        globalFilterName = filterNames[sender.tag]
        applyFilter(globalFilterToBeApplied: globalFilterName!)
    }
    func applyFilter(globalFilterToBeApplied: String) {
        let filter = CIFilter(name: globalFilterToBeApplied)!
        // Run every source frame through the chosen Core Image filter.
        let composition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
            let source = request.sourceImage.clampedToExtent()
            filter.setValue(source, forKey: kCIInputImageKey)
            let output = filter.outputImage!.cropped(to: request.sourceImage.extent)
            request.finish(with: output, context: nil)
        })
        globalrVideoComposition = composition
        self.playVideoInPlayer(animatedVideoURL: self.globalVideoURL as URL)
    }

    func playVideoInPlayer(animatedVideoURL: URL) {
        if globalFilterName != nil {
            self.asset = AVAsset.init(url: animatedVideoURL as URL)
            let newPlayerItem = AVPlayerItem.init(asset: self.asset)
            newPlayerItem.videoComposition = globalrVideoComposition
            self.player = AVPlayer.init(playerItem: newPlayerItem)
        } else {
            let newPlayerItem = AVPlayerItem.init(url: animatedVideoURL)
            self.player = AVPlayer.init(playerItem: newPlayerItem)
        }

        NotificationCenter.default.addObserver(self, selector: #selector(self.finishedPlaying(_:)), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: nil)

        self.playerLayer = AVPlayerLayer.init(player: self.player)
        let width: CGFloat = self.videoContainerView.frame.size.width
        let height: CGFloat = self.videoContainerView.frame.size.height
        self.playerLayer.frame = CGRect(x: 0.0, y: 0, width: width, height: height)
        self.playerLayer.backgroundColor = UIColor.black.cgColor
        self.playerLayer.videoGravity = .resizeAspectFill
        self.videoContainerView.layer.addSublayer(self.playerLayer)

        self.playPauseBtn.isHidden = false
        self.playPauseBtn.setImage(UIImage.init(named: "pause"), for: .normal)

        DispatchQueue.main.async {
            MBProgressHUD.hideAllHUDs(for: self.view, animated: true)
            self.player.play()
        }
    }
    func exportVideoWithAnimation() {
        let composition = AVMutableComposition()
        let track = self.asset?.tracks(withMediaType: AVMediaType.video)
        let videoTrack: AVAssetTrack = track![0] as AVAssetTrack
        let timerange = CMTimeRangeMake(kCMTimeZero, (self.asset?.duration)!)

        let compositionVideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())!

        do {
            try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
            compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
        } catch {
            print(error)
        }

        // If your video already has sound, you don't need this check.
        if self.audioIsEnabled {
            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!

            for audioTrack in (self.asset?.tracks(withMediaType: AVMediaType.audio))! {
                do {
                    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
                } catch {
                    print(error)
                }
            }
        }

        let size = videoTrack.naturalSize

        let videolayer = CALayer()
        videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
        parentlayer.addSublayer(videolayer)

        // This time array determines the start time of each frame's animation.
        // Each frame stays for 3 seconds; that's why the difference between entries is 3.
        var time = [0.00001, 3, 6, 9, 12]
        var imgarray = self.selectedImageArray

        for image in 0..<self.selectedImageArray.count {

            let nextPhoto = imgarray[image]

            let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
            let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
            let aspectRatio = min(horizontalRatio, verticalRatio)
            let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
            let blackLayer = CALayer()

            //MARK: - Animations
            ///#1. left -> right///
            //#3. top -> bottom///
            ///#7. opacity (1 -> 0) (top -> bottom)///
            if self.globalSelectedTransitionTag == 0 {
                blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
                blackLayer.backgroundColor = UIColor.black.cgColor

                let imageLayer = CALayer()
                imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
                imageLayer.contents = imgarray[image].cgImage
                blackLayer.addSublayer(imageLayer)

                let animation = CABasicAnimation()
                animation.keyPath = "position.x"
                animation.fromValue = -videoTrack.naturalSize.width
                animation.toValue = 5 * (videoTrack.naturalSize.width)
                animation.duration = 5
                animation.beginTime = CFTimeInterval(time[image])
                animation.fillMode = kCAFillModeForwards
                animation.isRemovedOnCompletion = false
                blackLayer.add(animation, forKey: "opacity")
            }
            parentlayer.addSublayer(blackLayer)
        }

        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(1, 30)
        layercomposition.renderSize = size
        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
        let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
        let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
        instruction.layerInstructions = [layerinstruction]
        layercomposition.instructions = [instruction]

        if fromTransition {
            self.globalrVideoComposition = layercomposition
        }

        let animatedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video2.mp4")
        self.removeFileAtURLIfExists(url: animatedVideoURL)

        guard let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
        assetExport.videoComposition = self.globalrVideoComposition
        assetExport.outputFileType = AVFileType.mp4
        assetExport.outputURL = animatedVideoURL as URL
        print("****** animatedVideoURL *****", animatedVideoURL)

        assetExport.exportAsynchronously(completionHandler: {
            switch assetExport.status {
            case AVAssetExportSessionStatus.failed:
                print("failed \(String(describing: assetExport.error))")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(String(describing: assetExport.error))")
            default:
                print("Exported")

                if self.fromPlayVideo {
                    DispatchQueue.main.async {
                        self.globalVideoURL = animatedVideoURL
                        self.playVideoInPlayer(animatedVideoURL: animatedVideoURL as URL)
                    }
                } else if self.fromSave {
                    PHPhotoLibrary.shared().performChanges({
                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: animatedVideoURL as URL)
                    }) { saved, error in
                        // `saved` only exists inside this completion handler.
                        if saved {
                            // Saved to the photo library.
                        } else {
                            // Handle the save error here.
                        }
                    }
                }
            }
        })
    }

    //MARK: - Make ScrollViews
    @objc func transitionTapped(sender: UIButton) {
        self.fromSave = false
        self.fromPlayVideo = true
        self.playPauseBtn.isHidden = true

        self.playerLayer.removeFromSuperlayer()

        globalSelectedTransitionTag = sender.tag
        exportVideoWithAnimation()
    }
}
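
What I have in mind is something like branching on the loop index, so that each image gets its own animation. A rough sketch of the idea, reusing `blackLayer`, `time`, `image`, and `videoTrack` from the loop above (the alternating key paths are just my assumption, not code from my project):

    // Hypothetical: alternate the animated key path per image index,
    // so even-indexed images slide in horizontally and odd-indexed images fade in.
    let animation = CABasicAnimation()
    if image % 2 == 0 {
        animation.keyPath = "position.x"
        animation.fromValue = -videoTrack.naturalSize.width
        animation.toValue = videoTrack.naturalSize.width / 2
    } else {
        animation.keyPath = "opacity"
        animation.fromValue = 0.0
        animation.toValue = 1.0
    }
    animation.duration = 3
    animation.beginTime = CFTimeInterval(time[image])
    animation.fillMode = kCAFillModeForwards
    animation.isRemovedOnCompletion = false
    blackLayer.add(animation, forKey: "perImageAnimation")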

Best Answer

If I'm not mistaken, that's nothing more than an opacity animation.

let animation = CABasicAnimation(keyPath: "opacity")

It "fades in" over a couple of seconds.

It looks like there's also a "scale animation" that simply scales the image up.
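
For example, a minimal sketch of that kind of fade-plus-scale on a plain CALayer (here `imageLayer` is a placeholder layer, and the duration and scale factor are assumptions):

    // Fade the layer in while scaling it up slightly.
    // `imageLayer` is a placeholder; the values are assumptions.
    let fade = CABasicAnimation(keyPath: "opacity")
    fade.fromValue = 0.0
    fade.toValue = 1.0

    let scale = CABasicAnimation(keyPath: "transform.scale")
    scale.fromValue = 1.0
    scale.toValue = 1.2

    let group = CAAnimationGroup()
    group.animations = [fade, scale]
    group.duration = 2.0
    group.fillMode = kCAFillModeForwards
    group.isRemovedOnCompletion = false
    imageLayer.add(group, forKey: "fadeAndScale")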

  1. The code you posted is a mess; as a beginner, just forget it entirely and don't even look at it.

  2. As a beginner, I wouldn't jump into "video... and animations!" to begin with.

Just try to make some "simple" animations in your app first. A good start is "sliding on and off the screen", or perhaps just fading something in and out. (So, try doing these with buttons and the like.)
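
For instance, a minimal sketch of a fade-and-slide on a button (`someButton` is a placeholder, and the duration and offset are assumptions):

    // Slide a button off to the right while fading it out.
    UIView.animate(withDuration: 0.5, animations: {
        someButton.alpha = 0.0
        someButton.center.x += 200
    })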

Regarding ios - Video animation in Swift 4, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/54572851/
