
iphone - How to change video orientation


I currently have an iPhone app that lets users record videos, upload them to a server, and lets other users watch those videos from within the app. Video orientation was never a problem until I built a website for viewing the different videos (among other things).

I fetch the videos from the web service and load them with AJAX into videojs, but every one of them is rotated 90 degrees to the left. From what I have read, the orientation metadata can be read on iOS but not on the website. Is there a way to save the video with the new orientation on the iPhone before it is sent to the server?
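Background: iOS does not rotate the recorded pixels themselves; the capture orientation is written into the video track's preferredTransform metadata, which native iOS players honor but many web players ignore. A minimal sketch of inspecting that metadata on the device (movieURL is a placeholder for the recorded file's URL):

// Inspect the orientation metadata iOS records with the clip
AVAsset *asset = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
AVAssetTrack *track = [asset tracksWithMediaType:AVMediaTypeVideo][0];
CGAffineTransform t = track.preferredTransform;
// For a portrait recording this is typically a 90-degree rotation matrix,
// while track.naturalSize still reports the landscape pixel dimensions.
NSLog(@"preferredTransform: [%f %f %f %f]", t.a, t.b, t.c, t.d);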

Best Answer

// Change the orientation of a video

// In videoorientation.h:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface videoorientationViewController : UIViewController
@property AVMutableComposition *mutableComposition;
@property AVMutableVideoComposition *mutableVideoComposition;
@property AVMutableAudioMix *mutableAudioMix;
@property AVAssetExportSession *exportSession;
- (void)performWithAsset:(NSURL *)moviename;
@end

// In videoorientationViewController.m:

#import "videoorientation.h"

// Degree/radian conversion helpers used by the transform math below
#define degreesToRadians(x) (M_PI * (x) / 180.0)
#define radiansToDegrees(x) ((x) * 180.0 / M_PI)

- (void)performWithAsset:(NSURL *)moviename
{

self.mutableComposition=nil;
self.mutableVideoComposition=nil;
self.mutableAudioMix=nil;

// NSString* filename = [NSString stringWithFormat:@"temp1.mov"];
//
// NSLog(@"file name== %@",filename);
//
// [[NSUserDefaults standardUserDefaults]setObject:filename forKey:@"currentName"];
// NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];

// NSLog(@"file number %i",_currentFile);

// NSURL* url = [NSURL fileURLWithPath:path];

// NSString *videoURL = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"m4v"];

AVAsset *asset = [[AVURLAsset alloc] initWithURL:moviename options:nil];

AVMutableVideoCompositionInstruction *instruction = nil;
AVMutableVideoCompositionLayerInstruction *layerInstruction = nil;
CGAffineTransform t1;
CGAffineTransform t2;

AVAssetTrack *assetVideoTrack = nil;
AVAssetTrack *assetAudioTrack = nil;
// Check if the asset contains video and audio tracks
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo][0];
}
if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio][0];
}

CMTime insertionPoint = kCMTimeZero;
NSError *error = nil;


// Step 1
// Create a composition with the given asset and insert audio and video tracks into it from the asset
if (!self.mutableComposition) {

// Check whether a composition has already been created, i.e., whether some other edit has already been applied
// Create a new composition
self.mutableComposition = [AVMutableComposition composition];

// Insert the video and audio tracks from AVAsset
if (assetVideoTrack != nil) {
AVMutableCompositionTrack *compositionVideoTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetVideoTrack atTime:insertionPoint error:&error];
}
if (assetAudioTrack != nil) {
AVMutableCompositionTrack *compositionAudioTrack = [self.mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [asset duration]) ofTrack:assetAudioTrack atTime:insertionPoint error:&error];
}

}


// Step 2
// Translate the composition to compensate for the movement caused by rotation (since rotation would otherwise move the frame out of view)
t1 = CGAffineTransformMakeTranslation(assetVideoTrack.naturalSize.height, 0.0);
float width=assetVideoTrack.naturalSize.width;
float height=assetVideoTrack.naturalSize.height;
float toDiagonal=sqrt(width*width+height*height);
float toDiagonalAngle = radiansToDegrees(acosf(width/toDiagonal));
float toDiagonalAngle2=90-radiansToDegrees(acosf(width/toDiagonal));

float toDiagonalAngleComple;
float toDiagonalAngleComple2;
float finalHeight = 0.0;
float finalWidth = 0.0;

float degrees=90;
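// Note: degrees is hard-coded to 90 here; it could instead be derived from the source
// track's preferredTransform. Worked example for a 1920x1080 landscape track rotated by
// 90 degrees: the branch below gives finalWidth = 1080, finalHeight = 1920, and t1
// shifts the frame right by 1080 px so the rotated video stays inside the render area.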

if(degrees>=0&&degrees<=90){

toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;

finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));

t1 = CGAffineTransformMakeTranslation(height*sinf(degreesToRadians(degrees)), 0.0);
}
else if(degrees>90&&degrees<=180){


float degrees2 = degrees-90;

toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;

finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));

t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degrees2))+height*cosf(degreesToRadians(degrees2)), height*sinf(degreesToRadians(degrees2)));
}
else if(degrees>=-90&&degrees<0){

float degrees2 = degrees-90;
float degreesabs = ABS(degrees);

toDiagonalAngleComple=toDiagonalAngle+degrees2;
toDiagonalAngleComple2=toDiagonalAngle2+degrees2;

finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));

t1 = CGAffineTransformMakeTranslation(0, width*sinf(degreesToRadians(degreesabs)));

}
else if(degrees>=-180&&degrees<-90){

float degreesabs = ABS(degrees);
float degreesplus = degreesabs-90;

toDiagonalAngleComple=toDiagonalAngle+degrees;
toDiagonalAngleComple2=toDiagonalAngle2+degrees;

finalHeight=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple)));
finalWidth=ABS(toDiagonal*sinf(degreesToRadians(toDiagonalAngleComple2)));

t1 = CGAffineTransformMakeTranslation(width*sinf(degreesToRadians(degreesplus)), height*sinf(degreesToRadians(degreesplus))+width*cosf(degreesToRadians(degreesplus)));

}


// Rotate transformation
t2 = CGAffineTransformRotate(t1, degreesToRadians(degrees));
//t2 = CGAffineTransformRotate(t1, -90);


// Step 3
// Set the appropriate render sizes and rotational transforms
if (!self.mutableVideoComposition) {

// Create a new video composition
self.mutableVideoComposition = [AVMutableVideoComposition videoComposition];
// self.mutableVideoComposition.renderSize = CGSizeMake(assetVideoTrack.naturalSize.height,assetVideoTrack.naturalSize.width);
self.mutableVideoComposition.renderSize = CGSizeMake(finalWidth,finalHeight);

self.mutableVideoComposition.frameDuration = CMTimeMake(1,30);

// The rotate transform is set on a layer instruction
instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [self.mutableComposition duration]);
layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:(self.mutableComposition.tracks)[0]];
[layerInstruction setTransform:t2 atTime:kCMTimeZero];

} else {

self.mutableVideoComposition.renderSize = CGSizeMake(self.mutableVideoComposition.renderSize.height, self.mutableVideoComposition.renderSize.width);

// Extract the existing layer instruction on the mutableVideoComposition
instruction = (self.mutableVideoComposition.instructions)[0];
layerInstruction = (instruction.layerInstructions)[0];

// Check if a transform already exists on this layer instruction, this is done to add the current transform on top of previous edits
CGAffineTransform existingTransform;

if (![layerInstruction getTransformRampForTime:[self.mutableComposition duration] startTransform:&existingTransform endTransform:NULL timeRange:NULL]) {
[layerInstruction setTransform:t2 atTime:kCMTimeZero];
} else {
// Note: the point of origin for rotation is the upper left corner of the composition, t3 is to compensate for origin
CGAffineTransform t3 = CGAffineTransformMakeTranslation(-1*assetVideoTrack.naturalSize.height/2, 0.0);
CGAffineTransform newTransform = CGAffineTransformConcat(existingTransform, CGAffineTransformConcat(t2, t3));
[layerInstruction setTransform:newTransform atTime:kCMTimeZero];
}

}


// Step 4
// Add the transform instructions to the video composition
instruction.layerInstructions = @[layerInstruction];
self.mutableVideoComposition.instructions = @[instruction];


// Step 5
// Notify AVSEViewController about rotation operation completion
// [[NSNotificationCenter defaultCenter] postNotificationName:AVSEEditCommandCompletionNotification object:self];

[self performWithAssetExport];
}

- (void)performWithAssetExport
{
// Step 1
// Create an outputURL to which the exported movie will be saved

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:outputURL withIntermediateDirectories:YES attributes:nil error:nil];
outputURL = [outputURL stringByAppendingPathComponent:@"output.mov"];
// Remove Existing File
[manager removeItemAtPath:outputURL error:nil];


// Step 2
// Create an export session with the composition and write the exported movie to the app's Documents directory
self.exportSession = [[AVAssetExportSession alloc] initWithAsset:[self.mutableComposition copy] presetName:AVAssetExportPreset1280x720];

self.exportSession.videoComposition = self.mutableVideoComposition;
self.exportSession.audioMix = self.mutableAudioMix;
self.exportSession.outputURL = [NSURL fileURLWithPath:outputURL];
self.exportSession.outputFileType=AVFileTypeQuickTimeMovie;


[self.exportSession exportAsynchronouslyWithCompletionHandler:^(void){
switch (self.exportSession.status) {
case AVAssetExportSessionStatusCompleted:

//[self playfunction];

[[NSNotificationCenter defaultCenter]postNotificationName:@"Backhome" object:nil];



// Step 3
// Notify AVSEViewController about export completion
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed:%@",self.exportSession.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled:%@",self.exportSession.error);
break;
default:
break;
}
}];



}
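
Not part of the original answer, but for illustration, one way the method above might be invoked is from a UIImagePickerController delegate once the user finishes recording (the picker setup and delegate assignment are assumed):

// Hypothetical call site in the same view controller
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    // File URL of the movie the user just recorded
    NSURL *recordedURL = info[UIImagePickerControllerMediaURL];
    [picker dismissViewControllerAnimated:YES completion:nil];
    // Rotate, re-render and export; the result is written to Documents/output.mov
    [self performWithAsset:recordedURL];
}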

Regarding iphone - how to change video orientation, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/11232297/
