原理及步骤:
1、传入视频路径、音乐路径、视频的时长参数等
2、用AVURLAsset分别读取视频、音乐的内容
3、新建一个视频的框架
4、往框架里面添加一条音频轨道和视频轨道
5、把音乐放入音频轨道、视频内容放入视频轨道(如果需要保留原音,读取视频里面的音频内容,也加入音频轨道)
6、导出视频
代码:
/**
 Mixes a background music track into a video, optionally keeping the video's original audio.

 Pipeline: read both media files with AVURLAsset, build an AVMutableComposition with one
 video track and one (or two) audio tracks, then export the composition with
 AVAssetExportSession.

 @param videoUrl   File URL of the source video.
 @param audioUrl   File URL of the music to mix in.
 @param needVoice  YES to also copy the video's own audio track into the output.
 @param videoRange Start time and duration of the video segment, in seconds. Example:
                   CGFloat ff1 = [self getMediaDurationWithMediaUrl:vpath];
                   NSMakeRange(0.0, ff1)
 @param completionHandle Invoked on the main queue when the export finishes (success,
                   failure, or cancellation — check the output file before using it).
 */
+ (void)addBackgroundMiusicWithVideoUrlStr:(NSURL *)videoUrl audioUrl:(NSURL *)audioUrl needVoice:(BOOL)needVoice andCaptureVideoWithRange:(NSRange)videoRange completion:(MixcompletionBlock)completionHandle {
    // AVURLAsset exposes the media's tracks and timing information.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];

    // The composition is the container the tracks are assembled into.
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Convert the seconds-based NSRange into a CMTimeRange using the asset's own
    // timescale so no precision is lost. Fall back to the conventional 600 when the
    // asset's duration has not been loaded yet (timescale would be 0 and
    // CMTimeMakeWithSeconds would produce an invalid time).
    int32_t timescale = videoAsset.duration.timescale ?: 600;
    CMTime startTime = CMTimeMakeWithSeconds(videoRange.location, timescale);
    CMTime videoDuration = CMTimeMakeWithSeconds(videoRange.length, timescale);
    CMTimeRange videoTimeRange = CMTimeRangeMake(startTime, videoDuration);

    // --- Video track ---
    // Guard against a missing track: insertTimeRange:ofTrack: must not receive nil.
    AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (sourceVideoTrack) {
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *videoError = nil;
        if (![compositionVideoTrack insertTimeRange:videoTimeRange ofTrack:sourceVideoTrack atTime:kCMTimeZero error:&videoError]) {
            NSLog(@"Failed to insert video track: %@", videoError);
        }
    }

    // --- Original audio track (optional) ---
    // Skipping this block drops the video's own sound from the output.
    if (needVoice) {
        AVAssetTrack *sourceVoiceTrack = [videoAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
        if (sourceVoiceTrack) {
            AVMutableCompositionTrack *compositionVoiceTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            NSError *voiceError = nil;
            if (![compositionVoiceTrack insertTimeRange:videoTimeRange ofTrack:sourceVoiceTrack atTime:kCMTimeZero error:&voiceError]) {
                NSLog(@"Failed to insert original audio track: %@", voiceError);
            }
        }
    }

    // --- Background music track ---
    // The music is trimmed to the same duration as the inserted video segment.
    CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, videoDuration);
    AVAssetTrack *sourceMusicTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (sourceMusicTrack) {
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *musicError = nil;
        if (![compositionAudioTrack insertTimeRange:audioTimeRange ofTrack:sourceMusicTrack atTime:kCMTimeZero error:&musicError]) {
            NSLog(@"Failed to insert background music track: %@", musicError);
        }
    }

    // --- Export ---
    // AVAssetExportSession writes the merged composition to disk; presetName selects
    // the output quality/size. The initializer can return nil for an unsupported
    // preset/asset combination, so guard before configuring it.
    AVAssetExportSession *assetExportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
    NSString *outPutPath = [NSHomeDirectory() stringByAppendingPathComponent:MediaFileName];
    NSURL *outPutUrl = [NSURL fileURLWithPath:outPutPath];
    // The export fails if a file already exists at the destination.
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPutPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPutPath error:nil];
    }
    if (!assetExportSession) {
        NSLog(@"Failed to create AVAssetExportSession");
        if (completionHandle) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionHandle();
            });
        }
        return;
    }
    // Output container format: AVFileTypeMPEG4, AVFileTypeQuickTimeMovie, ...
    assetExportSession.outputFileType = AVFileTypeMPEG4;
    assetExportSession.outputURL = outPutUrl;
    // Structure the file for progressive (streaming) playback.
    assetExportSession.shouldOptimizeForNetworkUse = YES;
    [assetExportSession exportAsynchronouslyWithCompletionHandler:^{
        // Surface export failures instead of silently reporting success.
        if (assetExportSession.status != AVAssetExportSessionStatusCompleted) {
            NSLog(@"Export finished with status %ld, error: %@", (long)assetExportSession.status, assetExportSession.error);
        }
        // The handler runs on a background queue; hop to main for UI-safe callbacks.
        if (completionHandle) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completionHandle();
            });
        }
    }];
}
获取视频时长的代码:
/**
 Returns the duration of a local media file, in seconds.

 @param mediaUrlStr Path to the local media file.
 @return The duration in seconds, including the fractional part.
 */
+ (CGFloat)getMediaDurationWithMediaUrl:(NSString *)mediaUrlStr {
    NSURL *mediaUrl = [NSURL fileURLWithPath:mediaUrlStr];
    AVURLAsset *mediaAsset = [[AVURLAsset alloc] initWithURL:mediaUrl options:nil];
    // CMTimeGetSeconds divides in floating point. The previous expression
    // (duration.value / duration.timescale) was int64/int32 integer division,
    // which truncated fractional seconds (a 5.9 s clip reported 5.0).
    return (CGFloat)CMTimeGetSeconds(mediaAsset.duration);
}