首页 > 解决方案 > iOS在Cameraview和捕获的图像上添加水印或叠加

问题描述

我正在尝试使用带有 Swift 5 的 iOS 13.6 在相机视图和捕获的图像上添加水印。这是我的代码:

import UIKit
import AVFoundation
import Foundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var navigationBar: UINavigationBar!
    @IBOutlet weak var shapeLayer: UIView!
    @IBOutlet weak var imgOverlay: UIImageView!
    
    var captureSession = AVCaptureSession()
    let stillImageOutput = AVCapturePhotoOutput()
    var previewLayer : AVCaptureVideoPreviewLayer?

    var captureDevice : AVCaptureDevice?
    var backCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
    
    
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
        
        captureSession.sessionPreset = AVCaptureSession.Preset.high
        
        if #available(iOS 13.6, *){
            let captureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice!)
                captureSession = AVCaptureSession()
                captureSession.addInput(input)
                previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                previewLayer?.frame = view.layer.bounds
                shapeLayer.layer.addSublayer(previewLayer!)
                captureSession.startRunning()
            } catch {
                print("error")
            }
        }
        
        if let devices = AVCaptureDevice.devices() as? [AVCaptureDevice] {
            // Loop through all the capture devices on this phone
            for device in devices {
                // Make sure this particular device supports video
                if (device.hasMediaType(AVMediaType.video)) {
                    // Finally check the position and confirm we've got the back camera
                    if(device.position == AVCaptureDevice.Position.back) {
                        captureDevice = device
                        if captureDevice != nil {
                            print("Capture device found")
                            beginSession()
                        }
                    }
                }
            }
        }
    }
    
    func beginSession() {

        
        do {
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
            stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]

            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }

        }
        catch {
            print("error: \(error.localizedDescription)")
        }
 
        
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

        
        // this is what displays the camera view. But - it's on TOP of the drawn view, and under the overview. ??
        self.view.layer.addSublayer(previewLayer)
        previewLayer.frame = self.view.layer.frame


        imgOverlay.frame = self.view.frame
        imgOverlay.image = self.drawCirclesOnImage(fromImage: nil, targetSize: imgOverlay.bounds.size)

        self.view.bringSubviewToFront(navigationBar)
        self.view.bringSubviewToFront(imgOverlay)
        //self.view.bringSubview(toFront: btnCapture)
        // don't use shapeLayer anymore...
        //      self.view.bringSubview(toFront: shapeLayer)


        captureSession.startRunning()
        print("Capture session running")

    }

    func getImageWithColor(color: UIColor, size: CGSize) -> UIImage {
        let rect = CGRect(origin: CGPoint(x: 0, y: 0), size: CGSize(width: size.width, height: size.height))
        UIGraphicsBeginImageContextWithOptions(size, false, 0)
        color.setFill()
        UIRectFill(rect)
        let image: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
        UIGraphicsEndImageContext()
        return image
    }

    func drawCirclesOnImage(fromImage: UIImage? = nil, targetSize: CGSize? = CGSize.zero) -> UIImage? {

        if fromImage == nil && targetSize == CGSize.zero {
            return nil
        }

        var tmpimg: UIImage?

        if targetSize == CGSize.zero {

            tmpimg = fromImage

        } else {

            tmpimg = getImageWithColor(color: UIColor.clear, size: targetSize!)

        }

        guard let img = tmpimg else {
            return nil
        }

        let imageSize = img.size
        let scale: CGFloat = 0
        UIGraphicsBeginImageContextWithOptions(imageSize, false, scale)

        img.draw(at: CGPoint.zero)

        let w = imageSize.width

        let midX = imageSize.width / 2
        let midY = imageSize.height / 2

        // red circles - radius in %
        let circleRads = [ 0.07, 0.13, 0.17, 0.22, 0.29, 0.36, 0.40, 0.48, 0.60, 0.75 ]

        // center "dot" - radius is 1.5%
        var circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: CGFloat(w * 0.015), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)

        UIColor.red.setFill()
        circlePath.stroke()
        circlePath.fill()

        // blue circle is between first and second red circles
        circlePath = UIBezierPath(arcCenter: CGPoint(x: midX,y: midY), radius: w * CGFloat((circleRads[0] + circleRads[1]) / 2.0), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)

        UIColor.blue.setStroke()
        circlePath.lineWidth = 2.5
        circlePath.stroke()

        UIColor.red.setStroke()

        for pct in circleRads {

            let rad = w * CGFloat(pct)

            circlePath = UIBezierPath(arcCenter: CGPoint(x: midX, y: midY), radius: CGFloat(rad), startAngle: CGFloat(0), endAngle:CGFloat(M_PI * 2), clockwise: true)

            circlePath.lineWidth = 2.5
            circlePath.stroke()

        }

        let newImage = UIGraphicsGetImageFromCurrentImageContext()

        UIGraphicsEndImageContext()

        return newImage
    }
    
    
    func saveToCamera() {
        
        if let videoConnection = stillImageOutput.connection(with: AVMediaType.video) {
            
              stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in

                if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
                    if let cameraImage = UIImage(data: imageData) {
                        // cameraImage is the camera preview image.

                        // I need to combine/merge it with the myImage that is actually the blue circles.

                        // This converts the UIView of the bllue circles to an image. Uses 'extension' at top of code.
                        let myImage = UIImage(view: self.shapeLayer)
                        print("converting myImage to an image")

                        let newImage = self.composite(image:cameraImage, overlay:(myImage), scaleOverlay:true)
                       UIImageWriteToSavedPhotosAlbum(newImage!, nil, nil, nil)

                    }
                }
            })
        }
    }



    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
   
}

上面给出的示例中的一些代码与 Swift 3 兼容,但我无法像下面给出的代码那样转换它:

do {
            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
            stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG]

            if captureSession.canAddOutput(stillImageOutput) {
                captureSession.addOutput(stillImageOutput)
            }

        }
        catch {
            print("error: \(error.localizedDescription)")
        }

而且这种方法也兼容 Swift 3,但能够在 Swift 5 上进行转换:

func saveToCamera() {
        
        if let videoConnection = stillImageOutput.connection(with: AVMediaType.video) {
            
              stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (CMSampleBuffer, Error) in

                if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(CMSampleBuffer) {
                    if let cameraImage = UIImage(data: imageData) {
                        // cameraImage is the camera preview image.

                        // I need to combine/merge it with the myImage that is actually the blue circles.

                        // This converts the UIView of the bllue circles to an image. Uses 'extension' at top of code.
                        let myImage = UIImage(view: self.shapeLayer)
                        print("converting myImage to an image")

                        let newImage = self.composite(image:cameraImage, overlay:(myImage), scaleOverlay:true)
                       UIImageWriteToSavedPhotosAlbum(newImage!, nil, nil, nil)

                    }
                }
            })
        }
    }

所以基本上我试图在cameraview上添加图像或形状,然后我想在捕获的图像上添加水印。我也无法在互联网上找到任何示例,我们可以在其中添加水印/覆盖图像到 cameraview 并将其保存为捕获图像。如果有人可以分享 Swift 4/5 兼容示例,那将会很有帮助。

标签: iosswiftiphonecameraavfoundation

解决方案


推荐阅读