const GLfloat kColorConversion601[] = { // BT.601 (standard-definition, video-range) YUV -> RGB conversion matrix
1.164, 1.164, 1.164,
0.0, -0.392, 2.017,
1.596, -0.813, 0.0,
};
const GLfloat kColorConversion709[] = { // BT.709 (high-definition) YUV -> RGB conversion matrix
1.164, 1.164, 1.164,
0.0, -0.213, 2.112,
1.793, -0.533, 0.0,
};
const GLfloat kColorConversion601FullRange[] = { // BT.601 full-range YUV -> RGB conversion matrix
1.0, 1.0, 1.0,
0.0, -0.343, 1.765,
1.4, -0.711, 0.0,
};
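// Note: these matrices are passed to glUniformMatrix3fv with transpose = GL_FALSE, so each
// row of the literals above is a column of the 3x3 matrix. For the video-range BT.601 case,
// with Y' = Y - 16/255 and Cb/Cr re-centered around 0, the shaders below compute:
//   R = 1.164*Y' + 1.596*Cr
//   G = 1.164*Y' - 0.392*Cb - 0.813*Cr
//   B = 1.164*Y' + 2.017*Cb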
NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
uniform sampler2D luminanceTexture;
uniform sampler2D chrominanceTexture;
uniform mediump mat3 colorConversionMatrix;
void main()
{
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
rgb = colorConversionMatrix * yuv;
gl_FragColor = vec4(rgb, 1);
}
);
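// The "ForRG" shader reads the chroma plane as a two-channel red/green texture (the path used
// when red textures are supported), while the "ForLA" variants used below read it as a
// GL_LUMINANCE_ALPHA texture, with Cb in .r and Cr in .a. The video-range variant additionally
// subtracts the 16/255 luma offset that full-range data does not carry.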
@interface GPUImageVideoCamera ()
{
AVCaptureDeviceInput *audioInput; // audio input
AVCaptureAudioDataOutput *audioOutput; // audio data output
NSDate *startingCaptureTime; // time at which capture started
dispatch_queue_t cameraProcessingQueue, audioProcessingQueue; // video and audio processing queues
GLProgram *yuvConversionProgram; // shader program used for YUV -> RGB conversion
GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute; // vertex position and texture coordinate attributes
GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform; // luminance and chrominance texture uniforms
GLint yuvConversionMatrixUniform; // YUV -> RGB conversion matrix uniform
const GLfloat *_preferredConversion; // currently selected conversion matrix
BOOL isFullYUVRange; // whether the incoming YUV data is full range
int imageBufferWidth, imageBufferHeight; // buffer width and height
BOOL addedAudioInputsDueToEncodingTarget; // whether audio I/O was added because an encoding target was set
}
- (void)updateOrientationSendToTargets; // propagate the current orientation to all targets
- (void)convertYUVToRGBOutput; // convert the captured YUV planes to an RGB output framebuffer
@end
@implementation GPUImageVideoCamera
@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;
- (id)init;
{
if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
{
return nil;
}
return self;
}
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
if (!(self = [super init]))
{
return nil;
}
cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0); // high-priority global queue for video processing
audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0); // low-priority global queue for audio processing
frameRenderingSemaphore = dispatch_semaphore_create(1); // allows at most one frame to be processed at a time
_frameRate = 0; // 0 means the device default frame rate
_runBenchmark = NO;
capturePaused = NO; // capture is not paused
outputRotation = kGPUImageNoRotation; // rotation forwarded to downstream targets
internalRotation = kGPUImageNoRotation; // rotation applied during the internal YUV -> RGB pass
captureAsYUV = YES; // capture in YUV format
_preferredConversion = kColorConversion709; // default to the BT.709 conversion matrix
_inputCamera = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == cameraPosition)
{
_inputCamera = device;
}
} // pick the device that matches the requested position
if (!_inputCamera) {
return nil;
}
_captureSession = [[AVCaptureSession alloc] init]; // create the capture session
[_captureSession beginConfiguration]; // begin configuration (capture itself starts later, in startCameraCapture)
NSError *error = nil;
videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
if ([_captureSession canAddInput:videoInput])
{
[_captureSession addInput:videoInput];
} // add the video input
videoOutput = [[AVCaptureVideoDataOutput alloc] init]; // video data output
[videoOutput setAlwaysDiscardsLateVideoFrames:NO];
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
{
BOOL supportsFullYUVRange = NO;
NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
for (NSNumber *currentPixelFormat in supportedPixelFormats)
{
if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
{
supportsFullYUVRange = YES;
}
} // check whether full-range YUV output is supported
if (supportsFullYUVRange)
{
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // output pixel format: full-range biplanar YUV
isFullYUVRange = YES;
}
else
{
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // output pixel format: video-range biplanar YUV
isFullYUVRange = NO;
}
}
else
{
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // output pixel format: 32-bit BGRA
}
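// Capturing biplanar 4:2:0 YUV moves roughly 12 bits per pixel off the camera instead of the
// 32 bits per pixel of BGRA, and the YUV -> RGB conversion is then performed on the GPU by the
// shaders defined above.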
runSynchronouslyOnVideoProcessingQueue(^{ // run synchronously on the video processing queue
if (captureAsYUV) // capturing in YUV format
{
[GPUImageContext useImageProcessingContext]; // make the shared image-processing OpenGL ES context current
if (isFullYUVRange)
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString]; // program for full-range YUV -> RGB conversion
}
else
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString]; // program for video-range YUV -> RGB conversion
}
if (!yuvConversionProgram.initialized)
{
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
}
});
[videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
if ([_captureSession canAddOutput:videoOutput])
{
[_captureSession addOutput:videoOutput];
}
else
{
NSLog(@"Couldn't add video output");
return nil;
}
_captureSessionPreset = sessionPreset;
[_captureSession setSessionPreset:_captureSessionPreset];
[_captureSession commitConfiguration];
return self;
}
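// A minimal usage sketch (the target name is illustrative, not part of this file):
//   GPUImageVideoCamera *camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
//                                                                      cameraPosition:AVCaptureDevicePositionBack];
//   camera.outputImageOrientation = UIInterfaceOrientationPortrait;
//   [camera addTarget:someFilterOrView];
//   [camera startCameraCapture];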
- (GPUImageFramebuffer *)framebufferForOutput;
{
return outputFramebuffer;
}
- (void)dealloc
{
[self stopCameraCapture];
[videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[self removeInputsAndOutputs];
#if !OS_OBJECT_USE_OBJC // when dispatch objects are not Objective-C objects (pre-iOS 6 SDKs), release the semaphore manually
if (frameRenderingSemaphore != NULL)
{
dispatch_release(frameRenderingSemaphore);
}
#endif
}
- (BOOL)addAudioInputsAndOutputs // add audio input and output to the capture session
{
if (audioOutput)
return NO;
[_captureSession beginConfiguration];
_microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
if ([_captureSession canAddInput:audioInput])
{
[_captureSession addInput:audioInput];
}
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
if ([_captureSession canAddOutput:audioOutput])
{
[_captureSession addOutput:audioOutput];
}
else
{
NSLog(@"Couldn't add audio output");
}
[audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];
[_captureSession commitConfiguration];
return YES;
}
- (BOOL)removeAudioInputsAndOutputs
{
if (!audioOutput)
return NO;
[_captureSession beginConfiguration]; // changes to the session must be wrapped in begin/commitConfiguration
[_captureSession removeInput:audioInput];
[_captureSession removeOutput:audioOutput];
audioInput = nil;
audioOutput = nil;
_microphone = nil;
[_captureSession commitConfiguration]; // commit the configuration change
return YES;
}
- (void)removeInputsAndOutputs;
{
[_captureSession beginConfiguration];
if (videoInput) {
[_captureSession removeInput:videoInput];
[_captureSession removeOutput:videoOutput];
videoInput = nil;
videoOutput = nil;
}
if (_microphone != nil)
{
[_captureSession removeInput:audioInput];
[_captureSession removeOutput:audioOutput];
audioInput = nil;
audioOutput = nil;
_microphone = nil;
}
[_captureSession commitConfiguration];
}
// Override the superclass method for adding a target
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
[super addTarget:newTarget atTextureLocation:textureLocation]; // run the superclass implementation first
[newTarget setInputRotation:outputRotation atIndex:textureLocation]; // pass the current output rotation to the new target
}
- (void)startCameraCapture; // start capturing
{
if (![_captureSession isRunning])
{
startingCaptureTime = [NSDate date];
[_captureSession startRunning];
};
}
- (void)stopCameraCapture; // stop capturing
{
if ([_captureSession isRunning])
{
[_captureSession stopRunning];
}
}
- (void)pauseCameraCapture; // pause capturing
{
capturePaused = YES;
}
- (void)resumeCameraCapture; // resume capturing
{
capturePaused = NO;
}
- (void)rotateCamera // switch between the front and rear cameras
{
if (self.frontFacingCameraPresent == NO)
return;
NSError *error;
AVCaptureDeviceInput *newVideoInput;
AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
if (currentCameraPosition == AVCaptureDevicePositionBack)
{
currentCameraPosition = AVCaptureDevicePositionFront;
}
else
{
currentCameraPosition = AVCaptureDevicePositionBack;
}
AVCaptureDevice *backFacingCamera = nil; // despite its name, this holds whichever device matches the new position
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == currentCameraPosition)
{
backFacingCamera = device;
}
}
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];
if (newVideoInput != nil)
{
[_captureSession beginConfiguration];
[_captureSession removeInput:videoInput];
if ([_captureSession canAddInput:newVideoInput])
{
[_captureSession addInput:newVideoInput];
videoInput = newVideoInput;
}
else
{
[_captureSession addInput:videoInput];
}
//captureSession.sessionPreset = oriPreset;
[_captureSession commitConfiguration];
}
_inputCamera = backFacingCamera;
[self setOutputImageOrientation:_outputImageOrientation];
}
- (AVCaptureDevicePosition)cameraPosition //get Camera Position
{
return [[videoInput device] position];
}
+ (BOOL)isBackFacingCameraPresent;
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == AVCaptureDevicePositionBack)
return YES;
}
return NO;
}
- (BOOL)isBackFacingCameraPresent
{
return [GPUImageVideoCamera isBackFacingCameraPresent];
}
+ (BOOL)isFrontFacingCameraPresent;
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == AVCaptureDevicePositionFront)
return YES;
}
return NO;
}
- (BOOL)isFrontFacingCameraPresent
{
return [GPUImageVideoCamera isFrontFacingCameraPresent];
}
- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
{
[_captureSession beginConfiguration];
_captureSessionPreset = captureSessionPreset;
[_captureSession setSessionPreset:_captureSessionPreset];
[_captureSession commitConfiguration];
}
- (void)setFrameRate:(int32_t)frameRate;
{
_frameRate = frameRate;
if (_frameRate > 0)
{
if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
[_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
NSError *error;
[_inputCamera lockForConfiguration:&error];
if (error == nil) {
#if defined(__IPHONE_7_0)
[_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)];
[_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)];
#endif
}
[_inputCamera unlockForConfiguration];
} else {
for (AVCaptureConnection *connection in videoOutput.connections)
{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
connection.videoMinFrameDuration = CMTimeMake(1, _frameRate);
if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate);
#pragma clang diagnostic pop
}
}
}
else
{
if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
[_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
NSError *error;
[_inputCamera lockForConfiguration:&error];
if (error == nil) {
#if defined(__IPHONE_7_0)
[_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid];
[_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid];
#endif
}
[_inputCamera unlockForConfiguration];
} else {
for (AVCaptureConnection *connection in videoOutput.connections)
{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default
if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default
#pragma clang diagnostic pop
}
}
}
}
- (int32_t)frameRate;
{
return _frameRate;
}
- (AVCaptureConnection *)videoCaptureConnection {
for (AVCaptureConnection *connection in [videoOutput connections] ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) {
return connection;
}
}
}
return nil;
}
#define INITIALFRAMESTOIGNOREFORBENCHMARK 5
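// The first few frames are excluded from the benchmark, presumably so that one-time setup cost
// (texture cache creation, shader warm-up) does not skew averageFrameDurationDuringCapture.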
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime; // update all targets with the new texture
{
// First, update all the framebuffers in the targets
for (id<GPUImageInput> currentTarget in targets)
{
if ([currentTarget enabled])
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget]; // position of this target in the targets array
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue]; // texture index the target expects its input on
if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget]; // tell the target how to rotate its input
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget]; // tell the target the input size
if ([currentTarget wantsMonochromeInput] && captureAsYUV) // target only needs luminance and we captured YUV
{
[currentTarget setCurrentlyReceivingMonochromeInput:YES];
// TODO: Replace optimization for monochrome output
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget]; // hand the output framebuffer (which holds the pixel data) to the target
}
else
{
[currentTarget setCurrentlyReceivingMonochromeInput:NO];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
}
}
else
{
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
}
}
}
[outputFramebuffer unlock]; // unlock so the framebuffer cache can reuse it
outputFramebuffer = nil;
// Finally, trigger rendering as needed
for (id<GPUImageInput> currentTarget in targets)
{
if ([currentTarget enabled])
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget]; // notify the target that a new frame is ready
}
}
}
}
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; // process a captured video sample buffer
{
if (capturePaused)
{
return;
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); // processing start time (for benchmarking)
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer); // pixel buffer of the video frame
int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame); // frame width
int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame); // frame height
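// Each pixel buffer carries an attachment describing which YCbCr matrix (BT.601 or BT.709) the
// frame was encoded with; use it to pick the matching conversion matrix, falling back to BT.601
// when the attachment is missing.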
CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL); // YCbCr matrix attachment
if (colorAttachments != NULL)
{
if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); // presentation timestamp of this frame
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
CVPixelBufferLockBaseAddress(cameraFrame, 0); // lock the pixel buffer for reading
if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
glActiveTexture(GL_TEXTURE4); // upload the luminance plane on texture unit 4
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); // create the luminance texture from plane 0 of the pixel buffer
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); // bind the luminance texture and clamp its wrap modes
// UV-plane
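// In 4:2:0 biplanar formats the chroma plane has half the width and half the height of the luma
// plane, hence the bufferWidth/2 x bufferHeight/2 dimensions and plane index 1 below.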
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); // bind the chrominance texture and clamp its wrap modes
int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
if (GPUImageRotationSwapsWidthAndHeight(internalRotation)) // swap width and height when the rotation requires it
{
rotatedImageBufferWidth = bufferHeight;
rotatedImageBufferHeight = bufferWidth;
}
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
else
{
}
if (_runBenchmark)
{
numberOfFramesCaptured++; // count captured frames
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); // time spent processing this frame
totalFrameTimeDuringCapture += currentFrameTime; // accumulated processing time
NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
}
else
{
CVPixelBufferLockBaseAddress(cameraFrame, 0); // lock the pixel buffer for reading
int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame); // stride of the BGRA buffer in bytes
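// bytesPerRow/4 is used as the texture width rather than bufferWidth because BGRA rows can be
// padded; dividing the stride by 4 bytes per pixel gives a width that matches the row layout.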
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
[outputFramebuffer activateFramebuffer];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
if (_runBenchmark)
{
numberOfFramesCaptured++;
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
}
}
}
}
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; // process a captured audio sample buffer
{
[self.audioEncodingTarget processAudioBuffer:sampleBuffer];
}
- (void)convertYUVToRGBOutput; // convert the uploaded YUV planes to an RGB framebuffer
{
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
{
rotatedImageBufferWidth = imageBufferHeight;
rotatedImageBufferHeight = imageBufferWidth;
}
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO]; // fetch an output framebuffer from the shared cache
[outputFramebuffer activateFramebuffer];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // clear the framebuffer
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
}; // vertex positions of a full-screen quad
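// Texture units 4 and 5 are reused here to match the glActiveTexture(GL_TEXTURE4/5) calls made
// when the planes were uploaded in processVideoSampleBuffer:, and the sampler uniforms are set
// to those same unit indices below.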
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glUniform1i(yuvConversionLuminanceTextureUniform, 4);
glActiveTexture(GL_TEXTURE5);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glUniform1i(yuvConversionChrominanceTextureUniform, 5);
glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); // draw the converted frame into the output framebuffer
}
- (CGFloat)averageFrameDurationDuringCapture;
{
return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
}
- (void)resetBenchmarkAverage;
{
numberOfFramesCaptured = 0;
totalFrameTimeDuringCapture = 0.0;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (!self.captureSession.isRunning)
{
return;
}
else if (captureOutput == audioOutput)
{
[self processAudioSampleBuffer:sampleBuffer];
}
else
{
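// If the previous frame is still being rendered, drop this frame rather than queueing it:
// dispatch_semaphore_wait with DISPATCH_TIME_NOW returns non-zero when the semaphore count
// is already zero, i.e. a frame is still in flight.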
if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}
CFRetain(sampleBuffer);
runAsynchronouslyOnVideoProcessingQueue(^{
//Feature Detection Hook.
if (self.delegate)
{
[self.delegate willOutputSampleBuffer:sampleBuffer]; // let the delegate inspect the raw sample buffer first
}
[self processVideoSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
dispatch_semaphore_signal(frameRenderingSemaphore);
});
}
}
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
if (newValue) {
/* Add audio inputs and outputs, if necessary */
addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
}
else if (addedAudioInputsDueToEncodingTarget) {
/* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */
[self removeAudioInputsAndOutputs];
addedAudioInputsDueToEncodingTarget = NO;
}
[super setAudioEncodingTarget:newValue];
}
- (void)updateOrientationSendToTargets;
{
runSynchronouslyOnVideoProcessingQueue(^{
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
{
outputRotation = kGPUImageNoRotation;
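// With the fast-upload YUV path, internalRotation describes the rotation applied during the
// on-GPU YUV -> RGB conversion (see convertYUVToRGBOutput and the width/height swap in
// processVideoSampleBuffer:), so targets are sent kGPUImageNoRotation; without that path the
// rotation is instead forwarded to targets as outputRotation.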
if ([self cameraPosition] == AVCaptureDevicePositionBack)
{
if (_horizontallyMirrorRearFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
default:internalRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;
default:internalRotation = kGPUImageNoRotation;
}
}
}
else
{
if (_horizontallyMirrorFrontFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
default:internalRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;
case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;
default:internalRotation = kGPUImageNoRotation;
}
}
}
}
else
{
if ([self cameraPosition] == AVCaptureDevicePositionBack)
{
if (_horizontallyMirrorRearFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
default:outputRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;
default:outputRotation = kGPUImageNoRotation;
}
}
}
else
{
if (_horizontallyMirrorFrontFacingCamera)
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
default:outputRotation = kGPUImageNoRotation;
}
}
else
{
switch(_outputImageOrientation)
{
case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;
case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;
default:outputRotation = kGPUImageNoRotation;
}
}
}
}
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
[currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
}
});
}
- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;
{
_outputImageOrientation = newValue;
[self updateOrientationSendToTargets];
}
- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
{
_horizontallyMirrorFrontFacingCamera = newValue;
[self updateOrientationSendToTargets];
}
- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
{
_horizontallyMirrorRearFacingCamera = newValue;
[self updateOrientationSendToTargets];
}
@end