Front camera preview is mirrored in GLKView

Problem description

I am building a custom camera in Swift using GLKView, and it works fine with the back camera. When I switch to the front camera, however, the preview is mirrored. Can anyone help me fix this?

import UIKit
import GLKit
import AVFoundation

public protocol FilterCamViewControllerDelegate: class {
    func filterCamDidStartRecording(_ filterCam: FilterCamViewController)
    func filterCamDidFinishRecording(_ filterCam: FilterCamViewController)
    func filterCam(_ filterCam: FilterCamViewController, didFailToRecord error: Error)
    func filterCam(_ filterCam: FilterCamViewController, didFinishWriting outputURL: URL)
    func filterCam(_ filterCam: FilterCamViewController, didFocusAtPoint tapPoint: CGPoint)
}

open class FilterCamViewController: UIViewController {



    var imageClickeClouser:((_ image: UIImage)->Void)?
    public weak var cameraDelegate: FilterCamViewControllerDelegate?

    public var devicePosition = AVCaptureDevice.Position.back

    public var videoQuality = AVCaptureSession.Preset.high

    public var filters: [CIFilter] = [] {
        didSet {
            recorder.filters = filters
        }
    }

    lazy var glContext: EAGLContext = {
        let glContext = EAGLContext(api: .openGLES2)
        return glContext!
    }()

    public var hasTorch: Bool {
        return recorder.hasTorch
    }

    public var torchLevel: Float {
        set {
            recorder.torchLevel = newValue
        }
        get {
            return recorder.torchLevel
        }
    }

    public var shouldShowDebugLabels: Bool = false {
        didSet {
            fpsLabel.isHidden = !shouldShowDebugLabels
            secLabel.isHidden = !shouldShowDebugLabels
        }
    }


    private let previewViewRect: CGRect

    private var videoPreviewContainerView: UIView!

    private var videoPreviewView: GLKView!

    private var ciContext: CIContext!

    private var recorder: Recorder!

    private var videoPreviewViewBounds: CGRect = .zero

    private var fpsLabel: UILabel!

    private var secLabel: UILabel!

   // var stillImageOutput: AVCapturePhotoOutput?
    var stillImageOutput = AVCapturePhotoOutput()

    var capturedUIImage: UIImage?

    lazy var cameraSession: AVCaptureSession = {
        let session = AVCaptureSession()
        session.sessionPreset = AVCaptureSession.Preset.photo
        return session
    }()

    lazy var photoFullPath: String = {
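        // Build a fixed path in the Documents directory for the captured photo
        // and delete any stale file left over from a previous run.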
        let documentsPath = NSSearchPathForDirectoriesInDomains(
            .documentDirectory,
            .userDomainMask,
            true)[0]

        let photoFullPath = documentsPath + "/swift_3_camera_capture_photo.png"
        let fileManager = FileManager.default

        if fileManager.fileExists(atPath: photoFullPath) {
            do {
                try fileManager.removeItem(atPath: photoFullPath)
            } catch let error as NSError {
                print (error)
            }
        }

        return photoFullPath
    }()

    private var isRecording: Bool {
        return recorder.assetWriter != nil
    }

    override open var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    override open var shouldAutorotate: Bool {
        return false
    }

    public init(previewViewRect: CGRect) {
        self.previewViewRect = previewViewRect
        super.init(nibName: nil, bundle: nil)
    }

    required public init?(coder aDecoder: NSCoder) {
        previewViewRect = UIScreen.main.bounds
        super.init(coder: aDecoder)
    }

    override open func viewDidLoad() {
        super.viewDidLoad()

        view.backgroundColor = .clear

        videoPreviewContainerView = UIView(frame: previewViewRect)
        videoPreviewContainerView.backgroundColor = .black
        view.addSubview(videoPreviewContainerView)
        view.sendSubview(toBack: videoPreviewContainerView)

        // setup the GLKView for video/image preview
        guard let eaglContext = EAGLContext(api: .openGLES2) else {
            fatalError("Could not create EAGLContext")
        }
        if eaglContext != EAGLContext.current() {
            EAGLContext.setCurrent(eaglContext)
        }
        videoPreviewView = GLKView(frame: CGRect(x: 0,
                                                 y: 0,
                                                 width: previewViewRect.height,
                                                 height: previewViewRect.width),
                                   context: eaglContext)
        videoPreviewContainerView.addSubview(videoPreviewView)

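        // Rotate the GLKView 90° so the landscape-oriented camera frames
        // are displayed upright in this portrait-only view controller.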
        videoPreviewView.transform = CGAffineTransform(rotationAngle: .pi / 2)
        videoPreviewView.frame = view.bounds
        videoPreviewView.center = CGPoint(x: previewViewRect.width * 0.5, y: previewViewRect.height * 0.5)
        videoPreviewView.enableSetNeedsDisplay = false

        ciContext = CIContext(eaglContext: eaglContext, options: [kCIContextWorkingColorSpace: NSNull()])

        recorder = Recorder(ciContext: ciContext, devicePosition: devicePosition, preset: videoQuality)
        recorder.delegate = self

        setupDebugLabels()
        addGestureRecognizers()
    }

func setupCameraSession() {
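    // Configure the capture session: use the back wide-angle camera as input and
    // attach a photo output (for stills) plus a video data output (for frames).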

    let captureDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: AVCaptureDevice.Position.back)

    do {
        cameraSession.beginConfiguration()

        let deviceInput = try AVCaptureDeviceInput(device: captureDevice!)
        if cameraSession.canAddInput(deviceInput) {
            cameraSession.addInput(deviceInput)
        }

        stillImageOutput = AVCapturePhotoOutput()
        if cameraSession.canAddOutput(stillImageOutput) {
            cameraSession.addOutput(stillImageOutput)
        }

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
        ]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        if cameraSession.canAddOutput(videoOutput) {
            cameraSession.addOutput(videoOutput)
        }

        cameraSession.commitConfiguration()

    } catch let error as NSError {
        print (error)
    }
}


// MARK: - Private

private func setupDebugLabels() {
    fpsLabel = UILabel()
    fpsLabel.isHidden = true
    view.addSubview(fpsLabel)
    fpsLabel.translatesAutoresizingMaskIntoConstraints = false
    fpsLabel.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 10).isActive = true
    fpsLabel.topAnchor.constraint(equalTo: view.topAnchor, constant: 20).isActive = true
    fpsLabel.text = ""
    fpsLabel.textColor = .white

    secLabel = UILabel()
    secLabel.isHidden = true
    view.addSubview(secLabel)
    secLabel.translatesAutoresizingMaskIntoConstraints = false
    secLabel.leadingAnchor.constraint(equalTo: fpsLabel.leadingAnchor).isActive = true
    secLabel.topAnchor.constraint(equalTo: fpsLabel.bottomAnchor).isActive = true
    secLabel.text = ""
    secLabel.textColor = .white
}

private func addGestureRecognizers() {
    let singleTapGesture = UITapGestureRecognizer(target: self, action: #selector(singleTapGesture(tap:)))
    singleTapGesture.numberOfTapsRequired = 1
    view.addGestureRecognizer(singleTapGesture)
}

@objc private func singleTapGesture(tap: UITapGestureRecognizer) {
    let screenSize = view.bounds.size
    let tapPoint = tap.location(in: view)
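    // Convert the tap from portrait view coordinates into normalized (0...1)
    // coordinates in the camera's landscape-oriented space before focusing.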
    let x = tapPoint.y / screenSize.height
    let y = 1.0 - tapPoint.x / screenSize.width
    let focusPoint = CGPoint(x: x, y: y)

    recorder.focus(at: focusPoint)

    // call delegate function and pass in the location of the touch
    DispatchQueue.main.async {
        self.cameraDelegate?.filterCam(self, didFocusAtPoint: tapPoint)
    }
}

private func calculateDrawRect(for image: CIImage) -> CGRect {
    let sourceExtent = image.extent
    let sourceAspect = sourceExtent.size.width / sourceExtent.size.height
    let previewAspect = videoPreviewViewBounds.size.width / videoPreviewViewBounds.size.height

    // we want to maintain the aspect ratio of the screen size, so we clip the video image
    var drawRect = sourceExtent

    if sourceAspect > previewAspect {
        // use full height of the video image, and center crop the width
        drawRect.origin.x += (drawRect.size.width - drawRect.size.height * previewAspect) / 2.0
        drawRect.size.width = drawRect.size.height * previewAspect
    } else {
        // use full width of the video image, and center crop the height
        drawRect.origin.y += (drawRect.size.height - drawRect.size.width / previewAspect) / 2.0
        drawRect.size.height = drawRect.size.width / previewAspect
    }

    return drawRect
}

// MARK: - Public

public func startRecording() {
    if !isRecording {
        recorder.startRecording()
    }
}

public func captureImage(imageHandler: @escaping ((_ image: UIImage) -> Void)) {
    imageHandler(videoPreviewView.snapshot)
}

public func stopRecording() {
    if isRecording {
        recorder.stopRecording()
    }
}

public func changeCamera() {
    recorder.changeCamera()
}

}

extension FilterCamViewController: RecorderDelegate {

func recorderDidUpdate(drawingImage: CIImage) {
    let drawRect = calculateDrawRect(for: drawingImage)
    videoPreviewView.bindDrawable()
    glClearColor(0.0, 0.0, 0.0, 1.0)
    glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
    ciContext.draw(drawingImage, in: videoPreviewViewBounds, from: drawRect)
    videoPreviewView.display()
}

func recorderDidStartRecording() {
    secLabel?.text = "00:00"
    cameraDelegate?.filterCamDidStartRecording(self)
}

func recorderDidAbortRecording() {}

func recorderDidFinishRecording() {
    cameraDelegate?.filterCamDidFinishRecording(self)
}

func recorderWillStartWriting() {
    secLabel?.text = "Saving..."
}

func recorderDidFinishWriting(outputURL: URL) {
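    // Hand the finished recording to Composer, which writes the final .mov to a
    // temporary file before notifying the delegate with the result or an error.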
    let fileName = UUID().uuidString
    let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(fileName).appendingPathExtension("mov")
    Composer.compose(videoURL: outputURL, outputURL: tempURL) { [weak self] url, error in
        guard let strongSelf = self else { return }
        if let url = url {
            strongSelf.cameraDelegate?.filterCam(strongSelf, didFinishWriting: url)
        } else if let error = error {
            strongSelf.cameraDelegate?.filterCam(strongSelf, didFailToRecord: error)
        }
    }
}

func recorderDidUpdate(frameRate: Float) {
    fpsLabel?.text = NSString(format: "%.1f fps", frameRate) as String
}

func recorderDidUpdate(recordingSeconds: Int) {
    secLabel?.text = NSString(format: "%02lu:%02lu sec", recordingSeconds / 60, recordingSeconds % 60) as String
}

func recorderDidFail(with error: Error & LocalizedError) {
    cameraDelegate?.filterCam(self, didFailToRecord: error)
}

}

extension FilterCamViewController: AVCapturePhotoCaptureDelegate {

public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

    if let error = error {
        print(error.localizedDescription)
    }
    if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {

        imageClickeClouser?( UIImage(data: dataImage)!)

    }
}

}

Tags: swift, camera, avfoundation, glkit, glkview

Solution
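
One common way to handle this is to flip the preview horizontally whenever the front camera is active, since raw frames from a video data output are not mirrored the way AVCaptureVideoPreviewLayer mirrors them automatically. Below is a minimal sketch (not from the original post), assuming the preview frames arrive through recorderDidUpdate(drawingImage:) as in the code above and that devicePosition is kept in sync with the active camera — in the posted code it is only set once, so it would also need to be toggled inside changeCamera().

func recorderDidUpdate(drawingImage: CIImage) {
    var image = drawingImage

    // Mirror only the front camera. The flip assumes the frame's extent
    // starts at (0, 0), which is the case for camera frames.
    if devicePosition == .front {
        let flip = CGAffineTransform(scaleX: -1, y: 1)
            .translatedBy(x: -image.extent.width, y: 0)
        image = image.transformed(by: flip)
    }

    let drawRect = calculateDrawRect(for: image)
    videoPreviewView.bindDrawable()
    glClearColor(0.0, 0.0, 0.0, 1.0)
    glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
    ciContext.draw(image, in: videoPreviewViewBounds, from: drawRect)
    videoPreviewView.display()
}

Alternatively, the mirroring can be done at the capture level: if the Recorder exposes the AVCaptureConnection of its video data output, setting automaticallyAdjustsVideoMirroring = false and isVideoMirrored = true on that connection while the front camera is active has the same effect without touching the drawing code.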

