I recently got the sudden urge to learn a bit about video processing, so I pulled this material together myself from Google, GitHub, and other sources. There is plenty of information online, and much of the code below is copied from other people's work with a few of my own comments added. Enough talk, here is the code; I hope it helps a few people~
Adding a Watermark
// 1 - Create an AVAsset instance. AVAsset holds all of the video's information; self.videoUrl is the URL of the input video
self.videoAsset = [AVAsset assetWithURL:self.videoUrl];
// 2 - Create an AVMutableComposition instance. From the Apple developer documentation: "AVMutableComposition is a mutable subclass of AVComposition you use when you want to create a new composition from existing assets. You can add and remove tracks, and you can add, remove, and scale time ranges."
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track. A composition has tracks just like a project file does (audio tracks, video tracks, and so on), and the corresponding media is inserted into them
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
// Insert the source video track into the mutable composition track; the time range passed here is also where you would trim the video
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration)
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero error:nil];
// 3.1 - AVMutableVideoCompositionInstruction describes how the composition's video tracks are combined over a time range (scaling, rotation, and so on are applied through its layer instructions)
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);
// 3.2 - AVMutableVideoCompositionLayerInstruction applies to a single video track and covers all of the video material on that track
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:self.videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
// AVMutableVideoComposition manages all of the video tracks and determines the final render size; cropping would be done here
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// 3.4 - Apply the watermark overlay; a sketch of this helper follows the code below
[self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize];
// 4 - Output path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
self.videoUrl = [NSURL fileURLWithPath:myPathDocs];
// 5 - Export the video file
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=self.videoUrl;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
//This runs after the export completes; do whatever you need with the result here
[self exportDidFinish:exporter];
});
}];
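The watermark itself is applied inside `applyVideoEffectsToComposition:size:`, which the code above calls but which is not shown in this post. Below is a minimal sketch of what such a method could look like, using AVVideoCompositionCoreAnimationTool to composite a CALayer on top of the video; the text, font size, and frame are placeholder values rather than anything from the original project.

```objc
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// Sketch of the helper called above; it overlays a CALayer-based watermark on the rendered video.
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition
                                  size:(CGSize)size
{
    // The watermark layer: a text layer here, but a plain CALayer whose contents
    // is a CGImage works the same way for an image watermark.
    CATextLayer *watermarkLayer = [CATextLayer layer];
    watermarkLayer.string = @"My Watermark";                       // placeholder text
    watermarkLayer.fontSize = 36.0;                                // placeholder size
    watermarkLayer.foregroundColor = [UIColor whiteColor].CGColor;
    watermarkLayer.alignmentMode = kCAAlignmentRight;
    watermarkLayer.frame = CGRectMake(0, 10, size.width - 20, 50); // placeholder position

    // parentLayer holds the video layer plus everything drawn on top of it.
    CALayer *videoLayer = [CALayer layer];
    videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
    CALayer *parentLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:watermarkLayer];

    // Render the video into videoLayer, then composite parentLayer into the output frames.
    composition.animationTool = [AVVideoCompositionCoreAnimationTool
        videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                inLayer:parentLayer];
}
```

With this set on the exporter's videoComposition, the watermark is burned into every frame of the output file.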
Trimming a Video
// 1 - Load the asset
self.videoAsset = [AVAsset assetWithURL:self.videoUrl];
// 2 - Create an AVMutableComposition instance. From the Apple developer documentation: "AVMutableComposition is a mutable subclass of AVComposition you use when you want to create a new composition from existing assets. You can add and remove tracks, and you can add, remove, and scale time ranges."
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 3 - Video track. A composition has tracks just like a project file does (audio tracks, video tracks, and so on), and the corresponding media is inserted into them
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
NSError *error = nil;
// This is where the trimming happens: the first argument of CMTimeRangeMake is the start time, the second is how long to keep (here: start at the 2-second mark and keep 2.55 seconds)
[videoTrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(2.0f, 30), CMTimeMakeWithSeconds(2.55f, 30))
ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
atTime:kCMTimeZero
error:&error];
// 3.1 - AVMutableVideoCompositionInstruction describes how the composition's video tracks are combined over a time range (scaling, rotation, and so on are applied through its layer instructions)
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration); // cover only the trimmed duration, not the full source asset
// 3.2 - AVMutableVideoCompositionLayerInstruction applies to a single video track and covers all of the video material on that track
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:mixComposition.duration]; // hide the layer at the end of the trimmed clip
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
// AVMutableVideoComposition manages all of the video tracks and determines the final render size; cropping would be done here
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
self.videoUrl = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=self.videoUrl;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = mainCompositionInst;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
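Every example in this post hands the finished export session to `exportDidFinish:`, which is never shown. A minimal sketch of such a method, assuming you only want to check the status and log the result, might look like this:

```objc
// Sketch of the exportDidFinish: method referenced by the completion handlers above.
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    switch (session.status) {
        case AVAssetExportSessionStatusCompleted:
            // The finished file is at session.outputURL; play it, upload it,
            // or save it to the photo library from here.
            NSLog(@"Export finished: %@", session.outputURL);
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Export failed: %@", session.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export cancelled");
            break;
        default:
            break;
    }
}
```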
Merging Videos
// For demonstration both assets point at the same URL; in practice these would be two different videos
AVAsset *firstAsset = [AVAsset assetWithURL:self.videoUrl];
AVAsset *secondAsset = [AVAsset assetWithURL:self.videoUrl];
// 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 2 - Video track
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:firstAsset.duration error:nil];
// 3 - Audio track
//This commented-out block appears to add background music (covered in the next section)
// if (audioAsset!=nil){
// AVMutableCompositionTrack *AudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
// preferredTrackID:kCMPersistentTrackID_Invalid];
// [AudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))
// ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
// }
// 4 - Get path
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:exporter];
});
}];
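The merge above appends the second clip to the same video track by inserting it at `firstAsset.duration`. The same idea extends to any number of clips by keeping a running insertion time; the helper below is a hypothetical sketch of that pattern, not part of the original code.

```objc
// Hypothetical helper: concatenate an array of assets into one composition by
// appending each clip's video track at the current end time.
- (AVMutableComposition *)compositionByConcatenatingAssets:(NSArray<AVAsset *> *)assets
{
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime insertTime = kCMTimeZero;
    for (AVAsset *asset in assets) {
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (assetVideoTrack == nil) {
            continue; // skip assets that have no video track
        }
        NSError *error = nil;
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                            ofTrack:assetVideoTrack
                             atTime:insertTime
                              error:&error];
        if (error) {
            NSLog(@"Failed to insert clip: %@", error);
            continue;
        }
        // Advance the insertion point to the end of what has been added so far.
        insertTime = CMTimeAdd(insertTime, asset.duration);
    }
    return mixComposition;
}
```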
Adding Background Music
//Video and audio sources
NSURL * videoInputUrl = self.videoUrl;
NSURL * audioInputUrl = AUDIO_URL;
//Output path for the composed file
NSString *outPutPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
//URL of the mixed video output
NSURL *outPutUrl = [NSURL fileURLWithPath:outPutPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outPutPath])
{
[[NSFileManager defaultManager] removeItemAtPath:outPutPath error:nil];
}
//Insertion start time
CMTime nextClistartTime = kCMTimeZero;
//Create the mutable audio/video composition
AVMutableComposition * comosition = [AVMutableComposition composition];
//Load the video asset
AVURLAsset * videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
//Time range of the video
CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// Video composition track (the constant kCMPersistentTrackID_Invalid is defined as 0)
AVMutableCompositionTrack * videoTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
//Source video track
AVAssetTrack * videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
//Insert the source track data into the mutable composition track
[videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClistartTime error:nil];
//Load the audio asset
AVURLAsset * audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
//Because the video here is short, the video's duration is used directly; for a general solution, compare the video and audio durations yourself
CMTimeRange audioTimeRange = videoTimeRange;
//Audio composition track
AVMutableCompositionTrack * audioTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
//Source audio track
AVAssetTrack * audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
//Insert into the composition track
[audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClistartTime error:nil];
#warning test
// 3.1 - Create AVMutableVideoCompositionInstruction
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationRight;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
videoAssetOrientation_ = UIImageOrientationLeft;
isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
videoAssetOrientation_ = UIImageOrientationUp;
}
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
videoAssetOrientation_ = UIImageOrientationDown;
}
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
[videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];
// 3.3 - Add instructions
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil];
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize;
if(isVideoAssetPortrait_){
naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
} else {
naturalSize = videoAssetTrack.naturalSize;
}
float renderWidth, renderHeight;
renderWidth = naturalSize.width;
renderHeight = naturalSize.height;
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
#warning test end (without the block above, the exported video comes out rotated 90 degrees)
//创建输出
AVAssetExportSession * assetExport = [[AVAssetExportSession alloc] initWithAsset:comosition presetName:AVAssetExportPresetMediumQuality];
assetExport.outputURL = outPutUrl;//Output URL
assetExport.outputFileType = AVFileTypeQuickTimeMovie;//Output file type
assetExport.shouldOptimizeForNetworkUse = YES;//Optimize the output for network playback (suitable for progressive download/streaming)
assetExport.videoComposition = mainCompositionInst;
[assetExport exportAsynchronouslyWithCompletionHandler:^{
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:assetExport];
});
}];
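One thing the code above does not do is control the music volume. If the background music should be quieter than the video's own audio, or fade out at the end, an AVMutableAudioMix can be attached to the export session. The sketch below is an assumption on my part; it reuses the `audioTrack`, `videoAsset`, and `assetExport` variables from above and would have to run before `exportAsynchronouslyWithCompletionHandler:` is called.

```objc
// Sketch: play the background music at half volume and fade it out over the last two seconds.
AVMutableAudioMixInputParameters *musicParams =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
[musicParams setVolume:0.5 atTime:kCMTimeZero];

CMTime fadeDuration = CMTimeMakeWithSeconds(2.0, 600);
CMTime fadeStart = CMTimeSubtract(videoAsset.duration, fadeDuration);
[musicParams setVolumeRampFromStartVolume:0.5
                              toEndVolume:0.0
                                timeRange:CMTimeRangeMake(fadeStart, fadeDuration)];

AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = @[musicParams];
assetExport.audioMix = audioMix; // must be set before the export starts
```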
Getting Frame Images from a Video
+ (NSArray *) thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time
{
NSMutableArray * returnArr = [NSMutableArray array];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil] ;
NSParameterAssert(asset);
AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset] ;
assetImageGenerator.appliesPreferredTrackTransform = YES;
assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
NSInteger alltime = [self getVideoAllTimeWith:videoURL]; // call the class method directly instead of allocating an instance
NSLog(@"%ld",(long)alltime);
for (int i = 0; i < alltime * 2; i++) {
CGImageRef thumbnailImageRef = NULL;
// Despite the section title, this grabs a frame every 0.5 seconds (two per second), not literally every frame
CFTimeInterval thumbnailImageTime = i * 60 / 2;
CMTime resultTime = CMTimeMake(thumbnailImageTime, 60);
CMTimeShow(resultTime);
NSError *thumbnailImageGenerationError = nil;
thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:resultTime actualTime:NULL error:&thumbnailImageGenerationError];
if (!thumbnailImageRef)
NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
UIImage *thumbnailImage = thumbnailImageRef ? [[UIImage alloc] initWithCGImage:thumbnailImageRef] : nil;
if (thumbnailImageRef) {
CGImageRelease(thumbnailImageRef); // copyCGImageAtTime returns a +1 reference; release it to avoid leaking
}
if (thumbnailImage) {
[returnArr addObject:thumbnailImage];
}
}
/*
This method originally took a single time value and returned the image at that time:
CGImageRef thumbnailImageRef = NULL;
CFTimeInterval thumbnailImageTime = time * 60;
CMTime resultTime = CMTimeMake(thumbnailImageTime, 60);
CMTimeShow(resultTime);
NSError *thumbnailImageGenerationError = nil;
thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:resultTime actualTime:NULL error:&thumbnailImageGenerationError];
if (!thumbnailImageRef)
NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
UIImage *thumbnailImage = thumbnailImageRef ? [[UIImage alloc] initWithCGImage:thumbnailImageRef] : nil;
if (thumbnailImage) {
[returnArr addObject:thumbnailImage];
}
*/
return returnArr;
}
+ (NSInteger)getVideoAllTimeWith:(NSURL *)url
{
NSDictionary *opts = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:NO]
forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:url options:opts]; // Load the video asset
int minute = 0, second = 0;
second = (int)ceil(CMTimeGetSeconds(urlAsset.duration)); // Total duration of the video in seconds (avoids the truncating integer division of value / timescale)
//NSLog(@"movie duration : %d", second);
// if (second >= 60) {
// int index = second / 60;
// minute = index;
// second = second - index*60;
// }
return second;
}
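Looping over `copyCGImageAtTime:` is synchronous and can be slow for longer videos. AVAssetImageGenerator also offers a batch asynchronous API, `generateCGImagesAsynchronouslyForTimes:completionHandler:`; the following is a rough sketch of the same half-second sampling done that way (`videoURL` stands in for whatever URL you are working with, and the completion block runs on a background queue).

```objc
// Sketch: generate thumbnails for a list of times asynchronously instead of
// calling copyCGImageAtTime: in a loop.
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform = YES;

// Request one image every half second, matching the loop above.
NSMutableArray<NSValue *> *times = [NSMutableArray array];
Float64 durationSeconds = CMTimeGetSeconds(asset.duration);
for (Float64 t = 0; t < durationSeconds; t += 0.5) {
    [times addObject:[NSValue valueWithCMTime:CMTimeMakeWithSeconds(t, 600)]];
}

[generator generateCGImagesAsynchronouslyForTimes:times
                                completionHandler:^(CMTime requestedTime,
                                                    CGImageRef image,
                                                    CMTime actualTime,
                                                    AVAssetImageGeneratorResult result,
                                                    NSError *error) {
    if (result == AVAssetImageGeneratorSucceeded && image != NULL) {
        UIImage *thumbnail = [UIImage imageWithCGImage:image];
        // Collect or display the thumbnail here.
        NSLog(@"Got thumbnail for %.2fs: %@", CMTimeGetSeconds(requestedTime), thumbnail);
    } else if (error) {
        NSLog(@"Thumbnail generation error: %@", error);
    }
}];
```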
The code has been uploaded to GitHub: [github](https://github.com/531464049/Video-Edit). I am still a beginner myself, so the code quality may be low and the logic may not be great; I am working on improving it bit by bit.
I actually started digging into all of this to implement one effect: overlaying an image on the video at a particular point in time, a bit like adding a mosaic to one part of the frame. I looked through a lot of material and still could not figure out how to do it. My thinking is that the principle should be much the same as adding a watermark, except that a watermark covers the whole video, whereas I want the image to appear only in a single frame or over a certain time range.
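For what it's worth, one plausible way to get that effect (untested here, but building directly on the watermark approach above) is to add the image as a CALayer in the animation tool's layer tree and animate its opacity so that it is only visible during the desired time range. Layer animations used in a video composition are timed from `AVCoreAnimationBeginTimeAtZero` rather than from wall-clock time. The image name, frame, and times below are placeholders.

```objc
// Sketch: show an overlay image only between startTime and endTime (in seconds)
// of the exported video. This would live inside a method like the
// applyVideoEffectsToComposition:size: sketch shown earlier.
CALayer *overlayLayer = [CALayer layer];
overlayLayer.contents = (id)[UIImage imageNamed:@"sticker"].CGImage; // placeholder image
overlayLayer.frame = CGRectMake(100, 100, 200, 200);                 // placeholder position
overlayLayer.opacity = 0.0; // hidden except while the animation below is active

CFTimeInterval startTime = 2.0;
CFTimeInterval endTime = 4.5;

// Snap the opacity to 1 at startTime and back to 0 at endTime.
CAKeyframeAnimation *visibility = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
visibility.values = @[@0.0, @1.0, @1.0, @0.0];
visibility.keyTimes = @[@0.0, @0.001, @0.999, @1.0];
visibility.beginTime = AVCoreAnimationBeginTimeAtZero + startTime; // video time, not wall-clock time
visibility.duration = endTime - startTime;
visibility.removedOnCompletion = NO;
visibility.fillMode = kCAFillModeRemoved; // no effect outside the animated range
[overlayLayer addAnimation:visibility forKey:@"showDuringRange"];

// Then add overlayLayer to the same parentLayer used for the watermark:
// [parentLayer addSublayer:overlayLayer];
```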