A few important points:
1. AVFoundation is fast and performant; GPUImage has better compatibility. Pick whichever fits your business needs.
2. Both trimming and splicing video are driven by insertTimeRange. This also involves some CMTime knowledge that I won't repeat here; Google it if it's new to you. A short splicing sketch follows at the end of this list.
[videoTrack insertTimeRange:CMTimeRangeMake(startTime, duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
3. Cropping the frame (for example, cropping a rectangular video down to a square) is controlled by renderSize.
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
4. The cropped square isn't centered by default, though, so fix that with a translation:
// Center the crop
videoTransform = CGAffineTransformTranslate(videoTransform, 0, -(naturalSize.height - naturalSize.width) / 2.f);
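As for point 2, here's a minimal splicing sketch (firstAsset and secondAsset are assumed to be AVURLAssets you've already loaded; error handling and the audio track are omitted). It appends the full range of the second clip right after the first clip on one composition track:
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *splicedTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// Insert the whole first clip at time zero.
[splicedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[firstAsset tracksWithMediaType:AVMediaTypeVideo].firstObject atTime:kCMTimeZero error:nil];
// Insert the whole second clip immediately after the first one.
[splicedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[secondAsset tracksWithMediaType:AVMediaTypeVideo].firstObject atTime:firstAsset.duration error:nil];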
OK, enough talk; here's the code:
- (void)addWaterMarkByAVFoundation {
NSURL *videoURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"IMG_4400" ofType:@"m4v"]];
NSDictionary *options = [NSDictionary dictionaryWithObject:@(YES) forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:options];
CMTime startTime = CMTimeMake(0, videoAsset.duration.timescale);
CMTime duration = CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale);
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// Video track
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *videoAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
[videoTrack insertTimeRange:CMTimeRangeMake(startTime, duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
// Audio track
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *audioAssetTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
[audioTrack insertTimeRange:CMTimeRangeMake(startTime, duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
// Video orientation
BOOL isVideoAssetPortrait = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
isVideoAssetPortrait = YES;
}else if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
isVideoAssetPortrait = YES;
}
// Render size
CGSize naturalSize;
if (isVideoAssetPortrait) {
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
}else {
naturalSize = videoAssetTrack.naturalSize;
}
CGFloat renderWidth = naturalSize.width;
CGFloat renderHeight = naturalSize.width;
// Center the crop
videoTransform = CGAffineTransformTranslate(videoTransform, 0, -(naturalSize.height - naturalSize.width) / 2.f);
AVAssetTrack *track = [mixComposition tracksWithMediaType:AVMediaTypeVideo].firstObject;
AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track];
[videoLayerInstruction setTransform:videoTransform atTime:kCMTimeZero];
[videoLayerInstruction setOpacity:0.0 atTime:duration];
AVMutableVideoCompositionInstruction *videoInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
videoInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
videoInstruction.layerInstructions = @[videoLayerInstruction];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(renderWidth, renderHeight);
videoComposition.instructions = @[videoInstruction];
videoComposition.frameDuration = CMTimeMake(1, 25);
[self applyVideoEffectsToComposition:videoComposition size:CGSizeMake(renderWidth, renderHeight)];
// Output path
NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
unlink([outputPath UTF8String]); // Remove any existing file at this path
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
_exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_exporter.videoComposition = videoComposition;
_exporter.outputURL = outputURL;
_exporter.outputFileType = AVFileTypeMPEG4; // match the .mp4 extension of outputPath
_exporter.shouldOptimizeForNetworkUse = YES;
__weak typeof(self) weakSelf = self;
[_exporter exportAsynchronouslyWithCompletionHandler:^{
// Export finished
switch (weakSelf.exporter.status) {
case AVAssetExportSessionStatusCompleted:
[weakSelf saveVideoToLibrary:outputPath];
break;
case AVAssetExportSessionStatusFailed:
kShowPopTip(nil, @"Export failed", nil);
break;
case AVAssetExportSessionStatusCancelled:
kShowPopTip(nil, @"Export cancelled", nil);
break;
default:
break;
}
}];
}
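Exporting a long video can take a while. If you want to surface progress, one rough approach (a sketch of my own, not part of the original flow) is to poll the session's progress property on a timer after calling exportAsynchronouslyWithCompletionHandler:
// Rough progress-polling sketch; assumes the exporter property used above.
__weak typeof(self) weakSelf = self;
[NSTimer scheduledTimerWithTimeInterval:0.3 repeats:YES block:^(NSTimer *timer) {
    NSLog(@"export progress: %.0f%%", weakSelf.exporter.progress * 100);
    // Stop polling once the export has ended, whatever the outcome.
    if (weakSelf.exporter.status == AVAssetExportSessionStatusCompleted ||
        weakSelf.exporter.status == AVAssetExportSessionStatusFailed ||
        weakSelf.exporter.status == AVAssetExportSessionStatusCancelled) {
        [timer invalidate];
    }
}];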
- (void)saveVideoToLibrary:(NSString *)outputPath {
if ([PHPhotoLibrary authorizationStatus] == PHAuthorizationStatusAuthorized) {
__block PHObjectPlaceholder *placeholder;
if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputPath)) {
NSError *error;
[[PHPhotoLibrary sharedPhotoLibrary] performChangesAndWait:^{
PHAssetChangeRequest *createAssetRequest = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:[NSURL fileURLWithPath:outputPath]];
placeholder = [createAssetRequest placeholderForCreatedAsset];
} error:&error];
if (error) {
kShowPopTip(self.view, error.description, nil);
}else {
kShowPopTip(self.view, @"Video saved to Photos", nil);
}
}
}
}
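Note that saveVideoToLibrary: only handles the already-authorized case. If Photos permission hasn't been requested yet, you'd want to ask first. A minimal sketch of a call site (my own addition, not from the original code):
// Minimal sketch: request Photos permission before saving.
if ([PHPhotoLibrary authorizationStatus] == PHAuthorizationStatusNotDetermined) {
    __weak typeof(self) weakSelf = self;
    [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
        if (status == PHAuthorizationStatusAuthorized) {
            [weakSelf saveVideoToLibrary:outputPath]; // outputPath: the exported file path from above
        }
    }];
}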
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)videoComposition size:(CGSize)size {
NSString *text = @"@Ander";
UIFont *font = kFontFromPx(100);
CGFontRef fontRef = CGFontCreateWithFontName((__bridge CFStringRef)font.fontName);
CATextLayer *textLayer = [[CATextLayer alloc] init];
textLayer.fontSize = font.pointSize;
textLayer.font = fontRef;
textLayer.string = text;
textLayer.alignmentMode = kCAAlignmentCenter;
textLayer.foregroundColor = kColorWithRGB(0xFFFFFF).CGColor;
textLayer.backgroundColor = [UIColor redColor].CGColor;
CGSize textSize = [text sizeWithAttributes:[NSDictionary dictionaryWithObjectsAndKeys:font, NSFontAttributeName, nil]];
textLayer.frame = CGRectMake(50, 100, textSize.width + 20, textSize.height + 20);
CGFontRelease(fontRef);
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:textLayer];
videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
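One extra note on the animation tool: you can animate the watermark layer with Core Animation, but for export the animation has to start at AVCoreAnimationBeginTimeAtZero (a beginTime of 0 gets remapped to "now") and must not be removed on completion. A minimal fade-in sketch you could drop into applyVideoEffectsToComposition:size: (my own addition, not from the original post):
// Minimal fade-in sketch for the watermark text layer.
CABasicAnimation *fadeIn = [CABasicAnimation animationWithKeyPath:@"opacity"];
fadeIn.fromValue = @(0.0);
fadeIn.toValue = @(1.0);
fadeIn.duration = 1.0;
fadeIn.beginTime = AVCoreAnimationBeginTimeAtZero; // the export timeline starts here, not at 0
fadeIn.removedOnCompletion = NO;
fadeIn.fillMode = kCAFillModeForwards;
[textLayer addAnimation:fadeIn forKey:@"fadeIn"];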