Quickly create 3 buttons or "interactions" in ARKit

Problem description

I use ARKit to recognise an image and visualise it in AR. I would like this image to have 3 points that the user, in AR mode, can see and touch to display text information related to the frame. How can I add these "buttons"? Below I show the code and a photo of what I would like it to become.

Final result

import UIKit
import SceneKit
import ARKit
import AVFoundation
import Foundation

class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {

    @IBOutlet weak var sessionInfoLabel: UILabel!
    @IBOutlet weak var sceneView: ARSCNView!
    let scanningPanel = UIImageView()

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        guard ARWorldTrackingConfiguration.isSupported else {
            fatalError("""
                ARKit is not available on this device. For apps that require ARKit
                for core functionality, use the `arkit` key in the
                `UIRequiredDeviceCapabilities` section of the Info.plist to prevent
                the app from installing. (If the app can't be installed, this error
                can't be triggered in a production scenario.)
                In apps where AR is an additive feature, use `isSupported` to
                determine whether to show UI for launching AR experiences.
            """) 
        }
        showOperaImage()
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        sceneView.session.run(configuration)
        sceneView.delegate = self
        sceneView.session.delegate = self
        UIApplication.shared.isIdleTimerDisabled = true
        resetTracking()
    }

    func showOperaImage(){

        scanningPanel.frame = CGRect(x: 50, y: 50, width: self.sceneView.frame.width*0.7, height:self.sceneView.frame.height*0.7)
        scanningPanel.layer.cornerRadius = 10
        scanningPanel.image = UIImage(named: "Croce")
        scanningPanel.layer.masksToBounds = true
        self.sceneView.addSubview(scanningPanel)
        scanningPanel.alpha = 0

        let button = UIButton()
        button.frame = CGRect(x: 40, y: 40, width: self.sceneView.frame.width*0.7, height:self.sceneView.frame.height*0.7)
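        // Note: this button is created but never added as a subview or given a target/action, so it never appears on screen.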
    }
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        sceneView.session.pause()
    }

    var session: ARSession {
        return sceneView.session
    }

    enum MessageType {
        case trackingStateEscalation
        case contentPlacement

        static var all: [MessageType] = [
            .trackingStateEscalation,
            .contentPlacement
        ]
    }

    func cancelScheduledMessage(for messageType: MessageType) {
        timers[messageType]?.invalidate()
        timers[messageType] = nil
    }

    @IBOutlet weak private var messagePanel: UIVisualEffectView!

    private var timers: [MessageType: Timer] = [:]
    private var messageHideTimer: Timer?

    private let displayDuration: TimeInterval = 6

    @IBOutlet weak private var messageLabel: UILabel!

    private func setMessageHidden(_ hide: Bool, animated: Bool) {
        messagePanel.isHidden = false
        guard animated else {
            messagePanel.alpha = hide ? 0 : 1
            return
        }

        UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
            self.messagePanel.alpha = hide ? 0 : 1
        }, completion: nil)
    }

    func showMessage(_ text: String, autoHide: Bool = true) {
        messageHideTimer?.invalidate()
        messageLabel.text = text
        setMessageHidden(false, animated: true)

        if autoHide {
            messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
                self?.setMessageHidden(true, animated: true)
            })
        }
    }

    func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
        cancelScheduledMessage(for: messageType)

        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
            self?.showMessage(text)
            timer.invalidate()
        })
        timers[messageType] = timer
    }


    func resetTracking() {

        guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing expected asset catalog resources.")
        }

        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        session.run(configuration, options: [.resetTracking, .removeExistingAnchors])

    }

    let updateQueue = DispatchQueue(label: Bundle.main.bundleIdentifier! +
        ".serialSceneKitQueue")

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        guard let imageAnchor = anchor as? ARImageAnchor else { return }
        let referenceImage = imageAnchor.referenceImage
        updateQueue.async {
            let plane = SCNPlane(width: referenceImage.physicalSize.width,
                                 height: referenceImage.physicalSize.height)

            let planeNode = SCNNode(geometry: plane)

            plane.firstMaterial?.diffuse.contents
                = UIImage(named: "Croce")

            planeNode.eulerAngles.x = -.pi / 2
            planeNode.runAction(self.imageHighlightAction)
            node.addChildNode(planeNode)
        }
    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        guard let planeAnchor = anchor as? ARPlaneAnchor,
            let planeNode = node.childNodes.first,
            let plane = planeNode.geometry as? SCNPlane
            else { return }

        planeNode.simdPosition = SIMD3<Float>(planeAnchor.center.x, 0, planeAnchor.center.z)

        plane.width = CGFloat(planeAnchor.extent.x)
        plane.height = CGFloat(planeAnchor.extent.z)
    }

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let frame = session.currentFrame else { return }
        updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
    }

    func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
        guard let frame = session.currentFrame else { return }
        updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
    }

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        updateSessionInfoLabel(for: session.currentFrame!, trackingState: camera.trackingState)
    }

    func sessionWasInterrupted(_ session: ARSession) {
        sessionInfoLabel.text = "Session was interrupted"
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        sessionInfoLabel.text = "Session interruption ended"
        resetTracking()
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        sessionInfoLabel.text = "Session failed: \(error.localizedDescription)"
        resetTracking()
    }

    func updateSessionInfoLabel(for frame: ARFrame, trackingState: ARCamera.TrackingState) {
        let message: String

        switch trackingState {
        case .normal where frame.anchors.isEmpty:
            message = "Move the device around to detect horizontal surfaces."

        case .notAvailable:
            message = "Tracking unavailable."

        case .limited(.excessiveMotion):
            message = "Tracking limited - Move the device more slowly."

        case .limited(.insufficientFeatures):
            message = "Tracking limited - Point the device at an area with visible surface detail, or improve lighting conditions."

        case .limited(.initializing):
            message = "Initializing AR session."

        default:
            message = ""
        }

        sessionInfoLabel.text = message
    }

    func resetTrackin() {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }
    var imageHighlightAction: SCNAction {
        return .sequence([
            .wait(duration: 100.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOut(duration: 0.5),
            .removeFromParentNode()
            ])
    }
}

Tags: ios, swift, xcode, button, arkit

Solution


I'm not sure this is the most robust or scalable way to solve your problem, but I managed to implement it fairly simply.

First I got an image that I knew was 10cm x 10cm.

Then I added it as an ARReferenceImage and set its physicalWidth and height to (you guessed it) 0.1m x 0.1m.
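
If you create the reference image in code rather than in the asset catalog, a minimal sketch of a helper you could drop into the view controller looks like this (the asset name "TargetImage" is an assumption; in the asset catalog you would simply fill in the Physical Size field instead):

// A minimal sketch, assuming the target artwork is in the asset catalog as "TargetImage".
// Only physicalWidth is supplied; ARKit infers the physical height from the image's aspect ratio.
func makeReferenceImage() -> ARReferenceImage? {
    guard let cgImage = UIImage(named: "TargetImage")?.cgImage else { return nil }
    let referenceImage = ARReferenceImage(cgImage, orientation: .up, physicalWidth: 0.1)
    referenceImage.name = "TargetImage"
    return referenceImage
}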

Now, since we know the size of the detectable image in advance, I created a simple SCNScene.


I made an SCNPlane with the same dimensions as the image and set its diffuse contents to our target image.

I then added three SCNToruses at the points I wanted to be able to detect. Obviously that is just me being silly, but you will get the idea.

Then I gave each pseudo-button a name: (a) Spike (Top), (b) Hand, (c) Tail.

Then I set the SCNPlane to hidden, since we don't actually need to see it.
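
I built this overlay as an Overlay.scn file in the scene editor, but a rough programmatic equivalent is sketched below; the torus positions, radii and colour are placeholder guesses, and only the node names matter for the hit test later:

// A rough sketch of the Overlay.scn scene described above (positions are guesses).
func makeOverlayNode() -> SCNNode {
    let overlayNode = SCNNode()

    // Hidden plane matching the 0.1m x 0.1m target; we never need to see it.
    let plane = SCNPlane(width: 0.1, height: 0.1)
    let planeNode = SCNNode(geometry: plane)
    planeNode.name = "Target"
    planeNode.isHidden = true
    overlayNode.addChildNode(planeNode)

    // Three named "pseudo buttons" laid over the points of interest.
    let buttons: [(name: String, position: SCNVector3)] = [
        ("Spike (Top)", SCNVector3(0.0, 0.04, 0.001)),
        ("Hand",        SCNVector3(-0.03, 0.0, 0.001)),
        ("Tail",        SCNVector3(0.03, -0.04, 0.001))
    ]

    for button in buttons {
        let torus = SCNTorus(ringRadius: 0.008, pipeRadius: 0.002)
        torus.firstMaterial?.diffuse.contents = UIColor.red
        let torusNode = SCNNode(geometry: torus)
        torusNode.name = button.name
        torusNode.position = button.position
        torusNode.eulerAngles.x = .pi / 2   // lay the ring flat against the plane
        overlayNode.addChildNode(torusNode)
    }

    return overlayNode
}

You would then add the node this returns to the detected anchor's node in the delegate callback below, instead of loading Overlay.scn.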

Having set up the ARImageTrackingConfiguration, I overlaid my scene using the delegate callback:

//---------------------------
// MARK: -  ARSCNViewDelegate
//---------------------------

extension ViewController: ARSCNViewDelegate{

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {

        //1. Check We Have Detected An ARImageAnchor & Check It's The One We Want
        guard let validImageAnchor = anchor as? ARImageAnchor,
              let targetName = validImageAnchor.referenceImage.name, targetName == "TargetImage" else { return}

        //2. Since We Know The Size Of Our Target & Have Created Our Overlay Simply Add It To The Node
        guard let interactiveTarget =  SCNScene(named: "art.scnassets/Overlay.scn") else { return }
        let overlayNode = interactiveTarget.rootNode
        overlayNode.eulerAngles.x = -.pi / 2
        node.addChildNode(overlayNode)

    }
}
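
The Overlay.scn scene only lines up because it was modelled against the same 10cm x 10cm target. If your reference image has a different physical size, a minimal sketch of a fix is to scale the loaded overlay to the size the anchor reports:

// A sketch of the same callback body as a helper, scaling the overlay for
// targets whose physical size differs from the 0.1m x 0.1m Overlay.scn assumes.
func addOverlay(to node: SCNNode, for imageAnchor: ARImageAnchor) {
    guard let overlayScene = SCNScene(named: "art.scnassets/Overlay.scn") else { return }
    let overlayNode = overlayScene.rootNode
    let size = imageAnchor.referenceImage.physicalSize
    overlayNode.scale = SCNVector3(Float(size.width) / 0.1,
                                   Float(size.height) / 0.1,
                                   1)
    overlayNode.eulerAngles.x = -.pi / 2
    node.addChildNode(overlayNode)
}

You would call this from renderer(_:didAdd:for:) in place of the inline code above.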

This matched up with the target image very well, so then I had to handle the interaction.

I simply used touchesBegan to detect whether we had hit any of the SCNNodes we were interested in:

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {

    //1. Get The Current Touch Location
    //2. Check We Have Hit An SCNNode By Performing An SCNHitTest
    //3. Check Our HitNode Has A Name & It Isn't The Target
    guard let currentTouchLocation = touches.first?.location(in: self.augmentedRealityView),
        let hitTestNode = self.augmentedRealityView.hitTest(currentTouchLocation, options: nil).first?.node,
        let name = hitTestNode.name, name != "Target" else { return }


    //4. Update The UI That We Have Touched An Area
    infoLabel.text = "Ya! Don't Touch My \(name)"

    //5. Reset The Text
    DispatchQueue.main.asyncAfter(deadline: .now() + 2) { self.infoLabel.text = "" }
}

This produced the desired result: tapping one of the tori updates the info label with its name.


Here is the full code:

//---------------------------
// MARK: -  ARSCNViewDelegate
//---------------------------

extension ViewController: ARSCNViewDelegate{

    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {

        //1. Check We Have Detected An ARImageAnchor & Check It's The One We Want
        guard let validImageAnchor = anchor as? ARImageAnchor,
              let targetName = validImageAnchor.referenceImage.name, targetName == "TargetImage" else { return}

        //2. Since We Know The Size Of Our Target & Have Created Our Overlay Simply Add It To The Node
        guard let interactiveTarget =  SCNScene(named: "art.scnassets/Overlay.scn") else { return }
        let overlayNode = interactiveTarget.rootNode
        overlayNode.eulerAngles.x = -.pi / 2
        node.addChildNode(overlayNode)

    }
}

class ViewController: UIViewController {

    @IBOutlet var augmentedRealityView: ARSCNView!
    @IBOutlet weak var infoLabel: UILabel!

    //-----------------------
    // MARK: - View LifeCycle
    //-----------------------

    override func viewDidLoad() {
        super.viewDidLoad()

        augmentedRealityView.delegate = self

        infoLabel.text = ""
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        //1. Get The Images We Wish To Track
        guard let imagesToTrack = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing Reference Images")
        }

        //2. Set Up Our ARTracking Configuration
        let configuration = ARImageTrackingConfiguration()
        configuration.maximumNumberOfTrackedImages = 1

        //3. Assign Our Detection Images
        configuration.trackingImages = imagesToTrack

        //4. Run The Session
        augmentedRealityView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {

        super.viewWillDisappear(animated)

        augmentedRealityView.session.pause()
    }

    //--------------------------
    // MARK: - Touch Interaction
    //--------------------------

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {

        //1. Get The Current Touch Location
        //2. Check We Have Hit An SCNNode By Performing An SCNHitTest
        //3. Check Our HitNode Has A Name & It Isn't The Target
        guard let currentTouchLocation = touches.first?.location(in: self.augmentedRealityView),
            let hitTestNode = self.augmentedRealityView.hitTest(currentTouchLocation, options: nil).first?.node,
            let name = hitTestNode.name, name != "Target" else { return }


        //4. Update The UI That We Have Touched An Area
        infoLabel.text = "Ya! Don't Touch My \(name)"

        //5. Reset The Text
        DispatchQueue.main.asyncAfter(deadline: .now() + 2) { self.infoLabel.text = "" }
    }


}
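
Since the question asks for each point to display text information related to the frame, one simple way to extend this is to keep a dictionary from node names to descriptions and use it in place of the touchesBegan handler shown above (the descriptions below are placeholders):

// Hypothetical descriptions keyed by the node names used in Overlay.scn.
let buttonDescriptions: [String: String] = [
    "Spike (Top)": "Some information about the spike...",
    "Hand": "Some information about the hand...",
    "Tail": "Some information about the tail..."
]

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {

    guard let currentTouchLocation = touches.first?.location(in: self.augmentedRealityView),
        let hitTestNode = self.augmentedRealityView.hitTest(currentTouchLocation, options: nil).first?.node,
        let name = hitTestNode.name, name != "Target" else { return }

    // Show the stored text for the tapped pseudo-button, falling back to its name.
    infoLabel.text = buttonDescriptions[name] ?? name

    DispatchQueue.main.asyncAfter(deadline: .now() + 2) { self.infoLabel.text = "" }
}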

As I said, it is perhaps not the most robust or scalable solution, but it should certainly point you in the right direction...

