// 1. Native media capture with AVFoundation (Swift 3.0)
import UIKit
import AVFoundation
class InformationAcquisitionViewController: UIViewController {

    /// Session that coordinates the audio/video inputs and outputs.
    lazy var captureSession: AVCaptureSession = {
        let session = AVCaptureSession()
        return session
    }()

    /// Current camera input (nil until the session has been configured).
    var videoDeviceInput: AVCaptureDeviceInput?

    /// Connection used to tell video sample buffers apart from audio ones.
    var videoConnection: AVCaptureConnection?

    /// Full-screen layer that shows the live camera preview.
    /// (Name keeps the original spelling; other code refers to it.)
    lazy var previedLayer: AVCaptureVideoPreviewLayer = {
        let layer = AVCaptureVideoPreviewLayer(session: self.captureSession)!
        layer.frame = UIScreen.main.bounds
        return layer
    }()

    /// Cursor image flashed at the point where the user taps to focus.
    lazy var focusImageView: UIImageView = {
        let imageView = UIImageView(image: UIImage(named: "focus"))
        imageView.frame = CGRect(x: 0, y: 0, width: 80, height: 61)
        self.view.addSubview(imageView)
        return imageView
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCaputureVideo()
    }
}
// MARK: - Capture setup
extension InformationAcquisitionViewController {

    /// Configures the capture pipeline: camera + microphone inputs, video +
    /// audio data outputs, the preview layer, and then starts the session.
    func setupCaputureVideo() {
        // Prefer the back camera; fall back to the front one.
        guard let videoDevice = getVideoDevice(.back) ?? getVideoDevice(.front) else {
            return
        }
        // Wrap the camera in a capture input.
        guard let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else {
            return
        }
        self.videoDeviceInput = videoDeviceInput
        // `defaultDevice` can return nil (e.g. simulator, no microphone);
        // guard instead of passing nil into AVCaptureDeviceInput and crashing.
        guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
            let audioDeviceInput = try? AVCaptureDeviceInput(device: audioDevice) else {
            return
        }
        // Only add inputs the session accepts.
        if captureSession.canAddInput(videoDeviceInput) {
            captureSession.addInput(videoDeviceInput)
        }
        if captureSession.canAddInput(audioDeviceInput) {
            captureSession.addInput(audioDeviceInput)
        }
        // Video frames are delivered on a dedicated serial queue.
        let videoOutput = AVCaptureVideoDataOutput()
        let videoQueue = DispatchQueue(label: "Video Capture Queue")
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }
        // Audio samples likewise get their own serial queue.
        let audioOutput = AVCaptureAudioDataOutput()
        let audioQueue = DispatchQueue(label: "Audio Capture Queue")
        audioOutput.setSampleBufferDelegate(self, queue: audioQueue)
        if captureSession.canAddOutput(audioOutput) {
            captureSession.addOutput(audioOutput)
        }
        // Remember the video connection so the sample-buffer delegate can
        // distinguish video buffers from audio buffers.
        videoConnection = videoOutput.connection(withMediaType: AVMediaTypeVideo)
        // Insert the live preview behind all other subviews.
        view.layer.insertSublayer(previedLayer, at: 0)
        captureSession.startRunning()
    }

    /// Returns the first camera facing `position`.
    ///
    /// - Parameter position: Which way the camera faces (front/back).
    /// - Returns: The matching device, or nil if none exists.
    func getVideoDevice(_ position: AVCaptureDevicePosition) -> AVCaptureDevice? {
        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) else {
            return nil
        }
        // `for case let … as` replaces the original `is` check + force cast.
        for case let device as AVCaptureDevice in devices where device.position == position {
            return device
        }
        return nil
    }

    /// Flashes the focus cursor at `point`: pops it up enlarged, shrinks it
    /// back to normal size over one second, then hides it.
    func setFocusCursorWithPoint(_ point: CGPoint) {
        focusImageView.center = point
        focusImageView.transform = CGAffineTransform(scaleX: 1.5, y: 1.5)
        focusImageView.alpha = 1.0
        UIView.animate(withDuration: 1.0, animations: {
            self.focusImageView.transform = CGAffineTransform(scaleX: 1.0, y: 1.0)
        }) { (_) in
            self.focusImageView.alpha = 0.0
        }
    }

    /// Applies focus and exposure settings to the current camera.
    ///
    /// Fix: the original ignored `focusMode`/`exposureMode` and always used
    /// `.autoFocus`/`.autoExpose`; the parameters are now honored. Points of
    /// interest are set before the modes, as Apple's documentation recommends.
    ///
    /// - Parameters:
    ///   - focusMode: Desired focus mode (applied if supported).
    ///   - exposureMode: Desired exposure mode (applied if supported).
    ///   - point: Point of interest in the camera's 0–1 coordinate space.
    func setupFocus(focusMode: AVCaptureFocusMode, exposureMode: AVCaptureExposureMode, point: CGPoint) {
        guard let captureDevice = videoDeviceInput?.device else {
            return
        }
        // The device must be locked before any configuration change.
        guard (try? captureDevice.lockForConfiguration()) != nil else {
            return
        }
        // Guarantees the lock is released on every exit path.
        defer { captureDevice.unlockForConfiguration() }
        // Focus: set the point of interest first, then the mode.
        if captureDevice.isFocusPointOfInterestSupported {
            captureDevice.focusPointOfInterest = point
        }
        if captureDevice.isFocusModeSupported(focusMode) {
            captureDevice.focusMode = focusMode
        }
        // Exposure: same ordering.
        if captureDevice.isExposurePointOfInterestSupported {
            captureDevice.exposurePointOfInterest = point
        }
        if captureDevice.isExposureModeSupported(exposureMode) {
            captureDevice.exposureMode = exposureMode
        }
    }
}
// MARK: - Event handling
extension InformationAcquisitionViewController {

    /// Switches between the front and back cameras.
    ///
    /// Fix: the input swap is now wrapped in `beginConfiguration` /
    /// `commitConfiguration` so the session reconfigures atomically, the new
    /// input is only added after a `canAddInput` check, and
    /// `videoDeviceInput` is only updated when the add actually succeeded.
    @IBAction func clickSwitchCamera(_ sender: UIBarButtonItem) {
        // Current facing direction; nil means no camera is attached yet.
        let currentPosition = videoDeviceInput?.device.position
        // Pick the opposite direction.
        let togglePosition: AVCaptureDevicePosition = currentPosition == .back ? .front : .back
        // Resolve the target camera and wrap it in an input.
        guard let toggleDevice = getVideoDevice(togglePosition),
            let toggleDeviceInput = try? AVCaptureDeviceInput(device: toggleDevice) else {
            return
        }
        captureSession.beginConfiguration()
        if let oldInput = videoDeviceInput {
            captureSession.removeInput(oldInput)
        }
        if captureSession.canAddInput(toggleDeviceInput) {
            captureSession.addInput(toggleDeviceInput)
            videoDeviceInput = toggleDeviceInput
        }
        captureSession.commitConfiguration()
    }

    /// Shows the focus cursor and refocuses the camera where the user tapped.
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        // `Set.first` replaces the original NSSet-bridging `anyObject()` dance.
        guard let touch = touches.first else {
            return
        }
        let point = touch.location(in: view)
        // Convert from view coordinates to the camera's 0–1 space.
        let cameraPoint = previedLayer.captureDevicePointOfInterest(for: point)
        // Flash the focus cursor at the tap location.
        setFocusCursorWithPoint(point)
        // Refocus and re-expose on the tapped point.
        setupFocus(focusMode: .autoFocus, exposureMode: .autoExpose, point: cameraPoint)
    }
}
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate
extension InformationAcquisitionViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    /// Called on the capture queues for every delivered sample buffer.
    /// The stored `videoConnection` decides whether the buffer is video;
    /// everything else is treated as audio.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        let isVideoBuffer = connection == videoConnection
        if isVideoBuffer {
            print("采集到视频信息")
        } else {
            print("采集到音频信息")
        }
    }
}