Video filter code works on iOS but not on macOS

Problem description

I am developing a video filter for iOS and macOS. It captures video input from the default camera, applies a filter (MPSImageGaussianBlur), and renders the result with an MTKView.

It works fine on iOS (iOS 13 on an iPhone 6s and an iPhone 11), but on macOS (10.15 on a MacBook Pro) I only see a red screen and I don't know why. captureOutput() and draw() are being called repeatedly, as expected.

Here is VS2CameraSession, which does most of the work. (Note that I added the kCVPixelBufferMetalCompatibilityKey flag to videoSettings, following the suggestion in "CVMetalTextureCacheCreateTextureFromImage returns -6660 on macOS 10.13".)

import AVFoundation
import MetalPerformanceShaders

class VS2CameraSession: NSObject {
    let gpu = MTLCreateSystemDefaultDevice()!

    private let session = AVCaptureSession()
    private let camera = AVCaptureDevice.default(for: .video)
    private var textureCache:CVMetalTextureCache?
    private var texture:MTLTexture?

    func startRunning() {
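        // Create a texture cache that converts captured CVPixelBuffers into Metal textures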
        CVMetalTextureCacheCreate(nil, nil, gpu, nil, &textureCache)
        guard let camera = camera,
              let input = try? AVCaptureDeviceInput(device: camera) else {
            return
        }
        guard session.canAddInput(input) else {
            return
        }
        session.addInput(input)
        
        let output = AVCaptureVideoDataOutput()
        output.alwaysDiscardsLateVideoFrames = true
        #if os(macOS)
        // https://stackoverflow.com/questions/46549906/cvmetaltexturecachecreatetexturefromimage-returns-6660-on-macos-10-13
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            kCVPixelBufferMetalCompatibilityKey as String: true
        ]
        #else
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        #endif
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session.addOutput(output)

        session.startRunning()
    }
    
    func draw(drawable:CAMetalDrawable?) {
        guard let texture = self.texture,
           let drawable = drawable,
           let commandQueue = gpu.makeCommandQueue(),
           let commandBuffer = commandQueue.makeCommandBuffer() else {
            return
        }
        // Apply filter(s)
        let filter = MPSImageGaussianBlur(device:gpu, sigma: 10.0)
        filter.encode(commandBuffer: commandBuffer, sourceTexture: texture, destinationTexture: drawable.texture)
        
        commandBuffer.present(drawable)
        commandBuffer.commit()
        self.texture = nil // no need to draw it again
    }
}

extension VS2CameraSession : AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
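        // Wrap the captured pixel buffer in a Metal texture via the texture cache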
        if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
           let textureCache = self.textureCache {
            let width = CVPixelBufferGetWidth(pixelBuffer)
            let height = CVPixelBufferGetHeight(pixelBuffer)
            var textureRef:CVMetalTexture?
            CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil,
                                                      .bgra8Unorm, width, height, 0, &textureRef)
            texture = CVMetalTextureGetTexture(textureRef!)
        }
    }
}

Here is VS2CameraViewController, which uses VS2CameraSession to render its view.

import UIKit
import SwiftUI
import MetalKit

final class VS2CameraViewController: UIViewController {
    let cameraSession = VS2CameraSession()

    override func loadView() {
        let metalView = MTKView()
        metalView.device = self.cameraSession.gpu
        metalView.delegate = self
        metalView.clearColor = MTLClearColorMake(1, 1, 1, 1)
        metalView.colorPixelFormat = MTLPixelFormat.bgra8Unorm
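        // framebufferOnly must be false so the MPS filter can write into the drawable's texture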
        metalView.framebufferOnly = false
        self.view = metalView
    }
    
    override func viewDidLoad() {
        cameraSession.startRunning()
    }
}

extension VS2CameraViewController : MTKViewDelegate {
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    }
    
    func draw(in view: MTKView) {
        cameraSession.draw(drawable: view.currentDrawable)
    }
}

extension VS2CameraViewController : UIViewControllerRepresentable {
    typealias UIViewControllerType = VS2CameraViewController
    
    public func makeUIViewController(context: UIViewControllerRepresentableContext<VS2CameraViewController>) -> VS2CameraViewController {
        return VS2CameraViewController()
    }
    
    public func updateUIViewController(_ uiViewController: VS2CameraViewController, context: UIViewControllerRepresentableContext<VS2CameraViewController>) {
    }
}

The full source code is available at https://github.com/snakajima/VideoShader2/tree/stack_overflow.

Tags: macos, avfoundation, metal, avcapturesession, cvpixelbuffer

Solution


I found the answer here: "Very slow framerate with AVFoundation and Metal in MacOS".

I just needed to keep a reference to the sampleBuffer along with the texture.
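A minimal sketch of that change, assuming the property and method names from VS2CameraSession above (the sampleBuffer property is new, introduced here for illustration): retain the CMSampleBuffer whose pixel buffer backs the Metal texture, and drop it at the same point where the texture is dropped.

    private var texture:MTLTexture?
    private var sampleBuffer:CMSampleBuffer? // keeps the pixel buffer backing `texture` alive

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let textureCache = self.textureCache else {
            return
        }
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        var textureRef:CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil,
                                                  .bgra8Unorm, width, height, 0, &textureRef)
        if let textureRef = textureRef {
            self.texture = CVMetalTextureGetTexture(textureRef)
            self.sampleBuffer = sampleBuffer // retained until draw() has consumed the texture
        }
    }

    func draw(drawable:CAMetalDrawable?) {
        // ... encode MPSImageGaussianBlur and commit the command buffer as before ...
        self.texture = nil
        self.sampleBuffer = nil // released at the same point the texture is released
    }

The MTLTexture returned by CVMetalTextureGetTexture does not keep the underlying buffer alive on its own, so without this extra reference the pixel data can be recycled before the GPU reads it.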

