• Oct 23, 2024

RealityKit & ARKit in SwiftUI — Move, Scale Virtual Item

  • DevTechie

RealityKit enables us to add gestures to control the behavior of the virtual object. We can add gestures to scale, translate or rotate the object.

Let’s start by creating an AR view.

import SwiftUI
import RealityKit

struct ContentView : View {
    var body: some View {
        ARViewContainer().ignoresSafeArea()
    }
}

struct ARViewContainer: UIViewRepresentable {
    
    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        
        // Forward taps to the coordinator, which will place objects in the scene.
        arView.addGestureRecognizer(
            UITapGestureRecognizer(
                target: context.coordinator,
                action: #selector(ARCoordinator.didTapItem(tapGesture:))
            )
        )
        context.coordinator.view = arView
        arView.session.delegate = context.coordinator
        
        return arView
    }
    
    func updateUIView(_ uiView: ARView, context: Context) {}
    
    func makeCoordinator() -> ARCoordinator {
        ARCoordinator()
    }
}
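
Note: AR needs camera access, so add the NSCameraUsageDescription key to your target's Info.plist and run the app on a physical device; ARKit sessions are not supported in the simulator.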

ARCoordinator will handle the tap gesture so we can add objects on a detected horizontal surface.

import ARKit
import SwiftUI
import RealityKit

final class ARCoordinator: NSObject, ARSessionDelegate {
    
    weak var view: ARView?
    
    @objc func didTapItem(tapGesture: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        
        // Raycast from the tap location onto an estimated horizontal plane.
        let tapLocation = tapGesture.location(in: view)
        let raycastResults = view.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        
        guard let firstResult = raycastResults.first else { return }
        
        // Anchor content at the real-world point the user tapped.
        let anchor = AnchorEntity(raycastResult: firstResult)
        view.scene.addAnchor(anchor)
    }
}

Time to add a model. We will add a simple box created with the ModelEntity class.

final class ARCoordinator: NSObject, ARSessionDelegate {
    weak var view: ARView?

    @objc func didTapItem(tapGesture: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        
        let tapLocation = tapGesture.location(in: view)
        let raycastResults = view.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        
        guard let firstResult = raycastResults.first else { return }
        
        let anchor = AnchorEntity(raycastResult: firstResult)
        view.scene.addAnchor(anchor)
        
        // A 10 cm box with a slightly transparent, metallic orange material.
        let model = ModelEntity(
            mesh: .generateBox(size: 0.1, cornerRadius: 0.0005),
            materials: [SimpleMaterial(color: .orange.withAlphaComponent(0.8), isMetallic: true)]
        )
    }
}

Next, we will generate a collision shape using the generateCollisionShapes function of the ModelEntity class. This method creates the shape used to detect collisions between two entities that have collision components.

final class ARCoordinator: NSObject, ARSessionDelegate {
    weak var view: ARView?

    @objc func didTapItem(tapGesture: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        
        let tapLocation = tapGesture.location(in: view)
        let raycastResults = view.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        
        guard let firstResult = raycastResults.first else { return }
        
        let anchor = AnchorEntity(raycastResult: firstResult)
        view.scene.addAnchor(anchor)
        
        let model = ModelEntity(
            mesh: .generateBox(size: 0.1, cornerRadius: 0.0005),
            materials: [SimpleMaterial(color: .orange.withAlphaComponent(0.8), isMetallic: true)]
        )
        
        // Gestures hit-test against collision shapes, so the model needs one.
        model.generateCollisionShapes(recursive: true)
    }
}

Once we have the model ready, we can add it to the anchor entity.

final class ARCoordinator: NSObject, ARSessionDelegate {
    weak var view: ARView?
    
    @objc func didTapItem(tapGesture: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        
        let tapLocation = tapGesture.location(in: view)
        let raycastResults = view.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        
        guard let firstResult = raycastResults.first else { return }
        
        let anchor = AnchorEntity(raycastResult: firstResult)
        view.scene.addAnchor(anchor)
        
        let model = ModelEntity(
            mesh: .generateBox(size: 0.1, cornerRadius: 0.0005),
            materials: [SimpleMaterial(color: .orange.withAlphaComponent(0.8), isMetallic: true)]
        )
        
        model.generateCollisionShapes(recursive: true)
        
        // Parent the model to the anchor so it appears at the tapped location.
        anchor.addChild(model)
    }
}

We are ready to add gestures to this object with the help of ARView's installGestures function. This function installs standard gestures onto the given entity, configured to be recognized simultaneously. It takes two parameters: gestures, the gesture types to install, and entity, the entity with which to associate the gesture recognizers. The entity must have a collision component, which is why we called generateCollisionShapes earlier.

We can apply the rotation, scale, and translation gestures individually, in combination, or all at once.

Let’s try them one by one, starting with the translation gesture.

import ARKit
import SwiftUI
import RealityKit

final class ARCoordinator: NSObject, ARSessionDelegate {
    
    weak var view: ARView?
    
    @objc func didTapItem(tapGesture: UITapGestureRecognizer) {
        guard let view = self.view else { return }
        
        let tapLocation = tapGesture.location(in: view)
        let raycastResults = view.raycast(from: tapLocation, allowing: .estimatedPlane, alignment: .horizontal)
        
        guard let firstResult = raycastResults.first else { return }
        
        let anchor = AnchorEntity(raycastResult: firstResult)
        view.scene.addAnchor(anchor)
        
        let model = ModelEntity(
            mesh: .generateBox(size: 0.1, cornerRadius: 0.0005),
            materials: [SimpleMaterial(color: .orange.withAlphaComponent(0.8), isMetallic: true)]
        )
        
        model.generateCollisionShapes(recursive: true)
        
        anchor.addChild(model)
        
        // Install the standard one-finger drag (translation) gesture.
        view.installGestures(.translation, for: model)
    }
}
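
Run the app, tap a horizontal surface to place the box, and then drag it with one finger to move it around the detected plane.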

To apply the scale gesture, just replace .translation with:

view.installGestures(.scale, for: model)

Apply rotation with:

view.installGestures(.rotation, for: model)

We can combine gestures by passing them as an array:

view.installGestures([.scale, .translation], for: model)

We can apply all of the gestures by passing .all as the gestures parameter:

view.installGestures(.all, for: model)
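
installGestures also returns the gesture recognizers it creates, which is useful if a model should later stop responding to gestures. A minimal sketch, assuming the same view and model as above:

// Keep the recognizers that installGestures returns.
let recognizers = view.installGestures(.all, for: model)

// Later, to stop the model from responding to gestures:
recognizers.forEach { view.removeGestureRecognizer($0) }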