ScreenCaptureKit 实现录屏功能

import AVFoundation
import Cocoa
import ScreenCaptureKit

/// Screen recorder built on ScreenCaptureKit.
///
/// One instance records one display; captured frames are delivered through
/// `recordingBufferCallBack` together with the caller-supplied `tag`.
class InairScreenCaptureRecord: NSObject, SCStreamDelegate, SCStreamOutput {
    
    @objc static let shareManager = InairScreenCaptureRecord()
    
    /// The display currently being recorded.
    var screen: SCDisplay?
    /// Last shareable-content snapshot fetched from ScreenCaptureKit.
    var availableContent: SCShareableContent?
    /// Content filter describing what the stream captures.
    var filter: SCContentFilter?
    /// The live capture stream (nil when not recording).
    var stream: SCStream!
    /// Audio settings prepared by `updateAudioSettings()`; audio capture itself
    /// is currently disabled — see `record(audioOnly:filter:)`.
    var audioSettings: [String : Any]!
    /// Caller-supplied identifier echoed back with every frame.
    var tag: Int = 0
    /// Called on the video sample-buffer queue for every captured frame.
    var recordingBufferCallBack: ((_ buffer: CMSampleBuffer, _ tag: Int) -> Void)?
    
    private let videoSampleBufferQueue = DispatchQueue(label: "screenCaptureKit-samplecode.VideoSampleBufferQueue")
    private let audioSampleBufferQueue = DispatchQueue(label: "screenCaptureKit-samplecode.AudioSampleBufferQueue")

    /// Whether the app has screen-recording permission.
    /// Fetching shareable content throws when permission is missing, so a
    /// successful call means recording is allowed.
    var canRecord: Bool {
        get async {
            do {
                // `_ =` silences the unused-result warning; only success matters.
                _ = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
                return true
            } catch {
                return false
            }
        }
    }

    deinit {
        self.stopRecording()
    }
    
    override init() {
        super.init()
    }
    
    /// Starts recording the display identified by `displayID`.
    /// - Parameters:
    ///   - displayID: Core Graphics id of the display to capture.
    ///   - tag: identifier passed back through `recordingBufferCallBack`.
    @objc func prepRecord(displayID: CGDirectDisplayID, tag: Int) {
        self.tag = tag
        Task {
            guard await self.canRecord else { return }
            do {
                // Retrieve the content that is available for capture.
                let availableContent = try await SCShareableContent.excludingDesktopWindows(false, onScreenWindowsOnly: true)
                self.availableContent = availableContent
                self.updateAudioSettings()
                // Find the display we were asked to record; bail out instead of
                // force-unwrapping when the id is unknown.
                guard let display = availableContent.displays.first(where: { displayID == $0.displayID }) else {
                    print("Display \(displayID) not found in shareable content")
                    return
                }
                self.screen = display
                let filter = SCContentFilter(display: display, excludingApplications: [], exceptingWindows: [])
                self.filter = filter
                // Already inside a Task — await directly instead of spawning another.
                await self.record(audioOnly: false, filter: filter)
            } catch {
                print("Failed to get the shareable content: \(error.localizedDescription)")
            }
        }
    }

    /// Returns true on Apple silicon (ARM64), false on Intel (x86_64).
    @objc open func getCPUTypeIsARM() -> Bool {
        // Read the 32-bit `hw.cputype` sysctl value directly; size the buffer
        // from Int32 (the original used MemoryLayout.size(ofValue: 0), which is
        // the size of Int, not of the Int32 actually read).
        var type: Int32 = 0
        var size = MemoryLayout<Int32>.size
        sysctlbyname("hw.cputype", &type, &size, nil, 0)
        
        if (type == CPU_TYPE_ARM64) {
            print("ARM===ARM===ARM===ARM")
            return true
        } else {
            print("X86_64===X86_64===X86_64===X86_64")
            return false
        }
    }


    /// Builds the stream configuration and starts capturing `filter`.
    /// - Parameters:
    ///   - audioOnly: currently unused — audio capture is commented out below.
    ///   - filter: the content filter to capture.
    func record(audioOnly: Bool, filter: SCContentFilter) async {
        guard let screen = self.screen else { return }

        let streamConfig = SCStreamConfiguration()
        streamConfig.pixelFormat = OSType(kCVPixelFormatType_32BGRA) // output pixel format
        streamConfig.width = screen.width
        streamConfig.height = screen.height
        streamConfig.minimumFrameInterval = CMTime(value: 1, timescale: 60) // cap at 60 fps
        streamConfig.showsCursor = true
        streamConfig.queueDepth = 5
        // To also capture audio, enable:
        //        streamConfig.capturesAudio = true
        //        streamConfig.sampleRate = audioSettings["AVSampleRateKey"] as! Int
        //        streamConfig.channelCount = audioSettings["AVNumberOfChannelsKey"] as! Int

        self.stream = SCStream(filter: filter, configuration: streamConfig, delegate: self)

        do {
            try self.stream.addStreamOutput(self, type: .screen, sampleHandlerQueue: videoSampleBufferQueue)
            // To receive audio buffers as well, add:
            //            try self.stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: audioSampleBufferQueue)
            try await self.stream.startCapture()
        } catch {
            assertionFailure("capture failed".local)
            return
        }
    }

    /// Stops the capture stream and clears all recording state.
    @objc func stopRecording() {
        if self.stream != nil {
            self.stream.stopCapture()
        }
        self.stream = nil
        self.screen = nil
        self.availableContent = nil
    }

    /// Prepares AAC 48 kHz stereo settings for (currently disabled) audio capture.
    func updateAudioSettings() {
        self.audioSettings = [AVSampleRateKey : 48000, AVNumberOfChannelsKey : 2] // reset audioSettings
        self.audioSettings[AVFormatIDKey] = kAudioFormatMPEG4AAC
        self.audioSettings[AVEncoderBitRateKey] = 256 * 1000
    }
    
    /// SCStreamOutput — receives captured sample buffers.
    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of outputType: SCStreamOutputType) {
        guard sampleBuffer.isValid else { return }

        switch outputType {
        case .screen:
            // Ignore frames arriving after stopRecording() cleared the screen.
            if self.screen == nil {
                break
            }
            print("===========视频=====================");
            self.recordingBufferCallBack?(sampleBuffer, self.tag)
        case .audio:
            // Audio buffers are received but not processed yet.
            print("===========音频(没做处理)=====================");
        @unknown default:
            assertionFailure("unknown stream type".local)
        }
    }

    /// SCStreamDelegate — the stream stopped with an error (e.g. the window
    /// closed or the user stopped capture from the system UI).
    func stream(_ stream: SCStream, didStopWithError error: Error) { // stream error
        print("关闭流时出现错误:\n".local, error,
              "\n 这可能是由于窗口关闭或用户从UI停止".local)
        DispatchQueue.main.async {
            self.stopRecording()
        }
    }
    
    /// Extracts the frame's base address, width and height from `sampleBuffer`.
    /// - Parameters:
    ///   - tag: identifier of the recorded screen.
    ///   - sampleBuffer: one video frame.
    ///   - complation: (data: base address, width, height, newTag: screen tag,
    ///     sampleSize: byte size of the frame, RawSampleBuffer: the original buffer).
    ///     Invoked synchronously while the pixel buffer is still locked — copy
    ///     the bytes if you need them after this call returns.
    func convertCMSampleBufferToData(_ tag: Int, sampleBuffer: CMSampleBuffer, complation: @escaping ((_ data: UnsafeMutableRawPointer?, _ width: Int, _ height: Int, _ newTag: Int, _ sampleSize: Int, _ RawSampleBuffer: CMSampleBuffer) -> Void)) {
        
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        // Keep the buffer locked until the completion has run: the base address
        // is only guaranteed valid while the lock is held. (The previous version
        // unlocked BEFORE invoking the callback — a use-after-unlock bug.)
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0)) }

        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)

        let height = CVPixelBufferGetHeight(imageBuffer)
        var width = CVPixelBufferGetWidth(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)

        print("======(\(width),\(height)) ===== PerRow = \(bytesPerRow)")

        // Total byte size of the pixel data.
        let sampleSize1 = CVPixelBufferGetDataSize(imageBuffer)
        // Widen `width` to absorb per-row padding (fixes garbled output at some
        // resolutions on Apple silicon; not needed on iOS). Guard against a
        // zero height to avoid dividing by zero on degenerate frames.
        if height > 0 {
            width = width + (sampleSize1 - width * height * 4) / (height * 4)
        }

        // `baseAddress` is already optional, so both the nil and non-nil paths
        // of the original collapse into a single call.
        complation(baseAddress, width, height, tag, sampleSize1, sampleBuffer)
    }
}

extension String {
    /// The localized form of this string, using the string itself as the key.
    /// Falls back to the key when no translation table entry exists.
    var local: String {
        NSLocalizedString(self, comment: "")
    }
}

使用示例


// Recorder for the main/built-in screen (tag 0).
var ScreenCaptureRecord:InairScreenCaptureRecord?
// Recorder for the display whose name contains "screenname1" (tag 1).
var ScreenCaptureRecord1:InairScreenCaptureRecord?

  
    /// Starts one recorder per screen returned by `getDisplayScreen()` and
    /// wires up the frame callbacks.
    @objc public func start() {
        // Screens we want to capture.
        let customScreenArray = self.getDisplayScreen()
        
        print("--------开始录屏------------")
        
        // (The original kept an `i` counter here that was never read — removed.)
        for screen in customScreenArray {
            // NSScreenNumber carries the CGDirectDisplayID for the screen.
            let displayID = screen.deviceDescription[NSDeviceDescriptionKey(rawValue: "NSScreenNumber")] as! CGDirectDisplayID
            
            let name = screen.localizedName.lowercased()
            if (name.contains("screenname1")) {
                self.ScreenCaptureRecord1 = InairScreenCaptureRecord()
                self.ScreenCaptureRecord1!.prepRecord(displayID: displayID, tag: 1)
            } else {
                // Record the main screen.
                self.ScreenCaptureRecord = InairScreenCaptureRecord()
                self.ScreenCaptureRecord!.prepRecord(displayID: displayID, tag: 0)
            }
        }
        
        self.recordingBufferReceiveProcessing()
    }
       
       
    /// Stops both recorders (if running) and releases them.
    @objc public func stop() {
        print("--------停止录屏---------")
        for recorder in [self.ScreenCaptureRecord, self.ScreenCaptureRecord1] {
            recorder?.stopRecording()
        }
        self.ScreenCaptureRecord = nil
        self.ScreenCaptureRecord1 = nil
    }
    
    //处理获取到的数据流
    func recordingBufferReceiveProcessing() {
        self.ScreenCaptureRecord?.recordingBufferCallBack = { (buffer, tag) in
            //直接显示到NSView上(使用layer或者metal)
            self.metalRender(with: buffer)
        }
        
        self.ScreenCaptureRecord1?.recordingBufferCallBack = { (buffer, tag) in
            self.metalRender(with: buffer)
        }
        
    }

 /// Returns the screens to record: the built-in display, plus any display
 /// whose localized name contains "screenname1".
 func getDisplayScreen() -> [NSScreen] {
        var customScreenArray: [NSScreen] = []
        for screen in NSScreen.screens {
            let displayId = screen.deviceDescription[NSDeviceDescriptionKey(rawValue: "NSScreenNumber")] as! CGDirectDisplayID
            let displayName: String = screen.localizedName
            // `else if` keeps each screen at most once: the original appended a
            // screen twice if it was built-in AND its name matched, and also
            // declared an `i` counter that was never used (removed).
            if CGDisplayIsBuiltin(displayId) != 0 {
                // Built-in screen.
                customScreenArray.append(screen)
            } else if displayName.contains("screenname1") {
                customScreenArray.append(screen)
            }
        }
        return customScreenArray
    }

渲染

layer渲染

// Layer-based renderer: sample buffers are enqueued directly onto this layer.
var displayLayer: AVSampleBufferDisplayLayer?

/// Renders a captured frame by enqueueing it on an AVSampleBufferDisplayLayer.
/// Frames arrive on the capture queue; layer work must happen on main.
func metalRender(with sampleBuffer: CMSampleBuffer) {
  // async, not sync: main.sync deadlocks if this is ever invoked on the main
  // thread, and it needlessly blocks the capture queue. The sample buffer is
  // retained by the closure, so it stays valid until enqueued.
  DispatchQueue.main.async {
      if self.displayLayer == nil {
          self.displayLayer = AVSampleBufferDisplayLayer()
          self.displayLayer?.frame = self.view.bounds // frame of the view that renders
          //self.displayLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
          self.view.layer.addSublayer(self.displayLayer!)
      }
    
      self.displayLayer?.enqueue(sampleBuffer)
  }
}

metal渲染

import MetalKit
import CoreMedia
import MetalPerformanceShaders

var mtkView: MTKView?// presentation view
var processQueue: DispatchQueue?// processing queue
var textureCache: CVMetalTextureCache?// Core Video Metal texture cache
var commandQueue: MTLCommandQueue?// Metal command queue
var texture: MTLTexture?// latest captured frame as a Metal texture

// Call this from your initializer, otherwise the delegate won't take effect.
setupMetal()

/// One-time Metal setup: creates the MTKView, command queue and the
/// Core Video Metal texture cache used to wrap captured pixel buffers.
func setupMetal() {
    // 1. Create the MTKView that will display frames (use your own frame).
    let metalView = MTKView(frame: self.view.bounds)
    metalView.device = MTLCreateSystemDefaultDevice()
    self.mtkView = metalView
    self.view.addSubview(metalView)
    metalView.delegate = self
    // 2. Make the drawable texture writable (it is read-only by default).
    metalView.framebufferOnly = false
    // 3. Command queue for submitting render work.
    self.commandQueue = metalView.device?.makeCommandQueue()
    // 4. Core Video Metal texture cache.
    CVMetalTextureCacheCreate(nil, nil, metalView.device!, nil, &textureCache)
}

/// Converts a captured frame into a Metal texture for `draw(in:)` to present.
func metalRender(with sampleBuffer: CMSampleBuffer) {
    // 1. Pixel buffer backing this video frame; guard instead of the original
    //    force-unwraps so a missing buffer or cache drops the frame, not the app.
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
          let textureCache = self.textureCache else {
        return
    }
    // 2. Captured frame dimensions.
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    // 3. Wrap the pixel buffer in a Core Video Metal texture (BGRA, plane 0).
    var tmpTexture: CVMetalTexture?
    let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil, .bgra8Unorm, width, height, 0, &tmpTexture)
    // 4. On success, size the drawable and stash the texture for draw(in:).
    if status == kCVReturnSuccess, let tmpTexture = tmpTexture {
        self.mtkView?.drawableSize = CGSize(width: CGFloat(width), height: CGFloat(height))
        self.texture = CVMetalTextureGetTexture(tmpTexture)
    }
}


extension ScreenRecordingViewController: MTKViewDelegate {
    /// Called when the view's size changes.
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
        print("视图大小发生改变时会调用此方法")
    }
    
    /// Renders the most recent captured texture, if any, through a Gaussian
    /// blur into the view's drawable.
    func draw(in view: MTKView) {
        // Only draw when a captured texture is pending.
        guard let texture = self.texture else { return }
        // Unwrap everything this frame needs; skip the frame if any piece is
        // unavailable instead of force-unwrapping and crashing (the original
        // used `!` on the command buffer, drawable and device).
        guard let device = view.device,
              let commandBuffer = commandQueue?.makeCommandBuffer(),
              let drawable = view.currentDrawable else { return }
        // Gaussian blur filter — a larger sigma produces a blurrier image.
        let filter = MPSImageGaussianBlur(device: device, sigma: 1)
        // Input: the captured frame; output: the drawable's texture.
        filter.encode(commandBuffer: commandBuffer, sourceTexture: texture, destinationTexture: drawable.texture)
        // Present the drawable and submit the command buffer.
        commandBuffer.present(drawable)
        commandBuffer.commit()
        // Clear so the next captured frame is picked up fresh.
        self.texture = nil
    }
}

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 203,547评论 6 477
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 85,399评论 2 381
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 150,428评论 0 337
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 54,599评论 1 274
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 63,612评论 5 365
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 48,577评论 1 281
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 37,941评论 3 395
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 36,603评论 0 258
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 40,852评论 1 297
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 35,605评论 2 321
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 37,693评论 1 329
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 33,375评论 4 318
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 38,955评论 3 307
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 29,936评论 0 19
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 31,172评论 1 259
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 43,970评论 2 349
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 42,414评论 2 342

推荐阅读更多精彩内容