Reputation: 1
Currently, the code only captures the texture from the last frame, but I want to scan the entire area at once. What is the best way to achieve this?
This is the code I'm using for scanning.
I want to export the scanned object to OBJ and USDZ files, including both color and texture.
import SpriteKit
import UIKit
import ARKit
import SceneKit
import RealityKit
/// A minimal SpriteKit overlay scene that shows a single centered label and
/// forwards any touch to an optional `onTapped` callback.
class LabelScene: SKScene {
    let label = SKLabelNode()
    var onTapped: (() -> Void)? = nil

    override public init(size: CGSize) {
        super.init(size: size)
        // Track the hosting view's size so the overlay always fills it.
        scaleMode = SKSceneScaleMode.resizeFill
        label.fontSize = 65
        label.fontColor = .blue
        // Horizontally centered, sitting just above the bottom edge.
        label.position = CGPoint(x: frame.midX, y: label.frame.size.height + 50)
        addChild(label)
    }

    required init?(coder aDecoder: NSCoder) {
        // Storyboard/NSCoder instantiation is intentionally unsupported.
        fatalError("Not been implemented")
    }

    /// Convenience initializer that wires up the tap callback in one call.
    convenience init(size: CGSize, onTapped: @escaping () -> Void) {
        self.init(size: size)
        self.onTapped = onTapped
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        // Any touch anywhere in the overlay triggers the callback.
        onTapped?()
    }

    /// Updates the text shown by the overlay label.
    func setText(text: String) {
        label.text = text
    }
}
/// Drives an ARKit mesh-reconstruction session, visualises the reconstructed
/// mesh anchors, lets the user capture a camera texture onto the mesh, and
/// exports the collected geometry to a Wavefront OBJ file.
class ScanViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {

    /// Scanning state machine, advanced by `rotateMode()` (user taps) and by
    /// the render-loop callback `renderer(_:updateAtTime:)`.
    enum ScanMode {
        case noneed   // idle: mesh anchors are shown with a flat colour
        case doing    // a texture capture was requested; applied on next render pass
        case done     // texture applied; waiting for the user to reset
    }

    @IBOutlet weak var sceneView: ARSCNView!
    var scanMode: ScanMode = .noneed
    // Original scene background, restored when leaving scan mode.
    var originalSource: Any? = nil
    var exportButton: UIButton!

    /// Tappable SpriteKit overlay that cycles the scan mode.
    lazy var label = LabelScene(size: sceneView.bounds.size) { [weak self] in
        self?.rotateMode()
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        func setARViewOptions() {
            sceneView.scene = SCNScene()
        }

        /// Builds a world-tracking configuration with LiDAR mesh reconstruction
        /// and (where supported) per-frame scene depth.
        func buildConfigure() -> ARWorldTrackingConfiguration {
            let configuration = ARWorldTrackingConfiguration()
            configuration.environmentTexturing = .automatic
            configuration.sceneReconstruction = .mesh
            if type(of: configuration).supportsFrameSemantics(.sceneDepth) {
                configuration.frameSemantics = .sceneDepth
            }
            return configuration
        }

        /// Installs the tap overlay and the auto-laid-out Export button.
        func setControls() {
            label.setText(text: "Scan")
            sceneView.overlaySKScene = label
            exportButton = UIButton(type: .system)
            exportButton.setTitle("Export", for: .normal)
            exportButton.addTarget(self, action: #selector(exportToOBJ), for: .touchUpInside)
            exportButton.translatesAutoresizingMaskIntoConstraints = false
            view.addSubview(exportButton)
            NSLayoutConstraint.activate([
                exportButton.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor, constant: -20),
                exportButton.centerXAnchor.constraint(equalTo: view.centerXAnchor)
            ])
        }

        sceneView.delegate = self
        sceneView.session.delegate = self
        setARViewOptions()
        let configuration = buildConfigure()
        sceneView.session.run(configuration)
        setControls()
    }

    /// Serialises every geometry attached to the scene's root-level nodes into
    /// a single OBJ file in the temporary directory.
    /// NOTE(review): OBJ files carry geometry only; colour/texture export
    /// additionally needs an .mtl + image file, or a USDZ export via
    /// `SCNScene.write(to:options:delegate:progressHandler:)`.
    @objc func exportToOBJ() {
        // Require an active AR frame so we don't export before the session
        // has produced anything. (Fix: the frame itself was bound but unused.)
        guard sceneView.session.currentFrame != nil else { return }
        let objData = NSMutableData()
        for node in sceneView.scene.rootNode.childNodes {
            guard let geometry = node.geometry,
                  let data = geometry.toOBJ().data(using: .utf8) else { continue }
            objData.append(data)
        }
        let objPath = FileManager.default.temporaryDirectory.appendingPathComponent("scan.obj")
        do {
            try objData.write(to: objPath)
            print("Exported to \(objPath)")
        } catch {
            print("Failed to write .obj file: \(error)")
        }
    }

    /// Advances the scan state machine on each overlay tap:
    /// noneed -> doing (request texture capture), done -> noneed (reset).
    func rotateMode() {
        switch self.scanMode {
        case .noneed:
            self.scanMode = .doing
            label.setText(text: "Reset")
            // Black out the camera feed so only the captured mesh is visible.
            originalSource = sceneView.scene.background.contents
            sceneView.scene.background.contents = UIColor.black
        case .doing:
            // Capture in progress; ignore taps until the render loop finishes it.
            break
        case .done:
            scanAllGeometry(needTexture: false)
            self.scanMode = .noneed
            label.setText(text: "Scan")
            sceneView.scene.background.contents = originalSource
        }
    }

    // MARK: - ARSCNViewDelegate

    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        // Only create new mesh nodes while idle; during/after a capture the
        // existing nodes keep their textured geometry.
        guard scanMode == .noneed else {
            return nil
        }
        guard let anchor = anchor as? ARMeshAnchor,
              let frame = sceneView.session.currentFrame else { return nil }
        let node = SCNNode()
        let geometry = scanGeometry(frame: frame, anchor: anchor, node: node)
        node.geometry = geometry
        return node
    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        guard scanMode == .noneed else {
            return
        }
        guard let frame = self.sceneView.session.currentFrame else { return }
        guard let anchor = anchor as? ARMeshAnchor else { return }
        let geometry = self.scanGeometry(frame: frame, anchor: anchor, node: node)
        node.geometry = geometry
    }

    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        // A capture requested by the user is executed on the render thread so
        // it uses a fully up-to-date frame; it then transitions to .done.
        if (self.scanMode == .doing) {
            self.scanAllGeometry(needTexture: true)
            self.scanMode = .done
        }
    }

    // MARK: - Geometry capture

    /// Builds an SCNGeometry for one mesh anchor, optionally projecting the
    /// supplied camera image onto it as a diffuse texture.
    func scanGeometry(frame: ARFrame, anchor: ARMeshAnchor, node: SCNNode, needTexture: Bool = false, cameraImage: UIImage? = nil) -> SCNGeometry {
        let camera = frame.camera
        let geometry = SCNGeometry(geometry: anchor.geometry, camera: camera, modelMatrix: anchor.transform, needTexture: needTexture)
        if let image = cameraImage, needTexture {
            geometry.firstMaterial?.diffuse.contents = image
        } else {
            // Untextured preview colour (semi-transparent green).
            geometry.firstMaterial?.diffuse.contents = UIColor(red: 0.5, green: 1.0, blue: 0.0, alpha: 0.7)
        }
        node.geometry = geometry
        return geometry
    }

    /// Re-builds geometry for every tracked mesh anchor using a single camera
    /// capture.
    /// NOTE(review): because only ONE frame's image is projected, surfaces not
    /// visible in that frame get no usable texture — this is the root cause of
    /// the "texture from the last frame only" symptom. Accumulating captures
    /// per-anchor (or using Object Capture / photogrammetry) is required for
    /// full-coverage texturing.
    func scanAllGeometry(needTexture: Bool) {
        // Fix: bind the frame once and reuse it; the original fetched
        // `currentFrame` a second time, which can observe a different frame.
        guard let frame = sceneView.session.currentFrame,
              let cameraImage = captureCamera() else { return }
        let meshAnchors = frame.anchors.compactMap { $0 as? ARMeshAnchor }
        for anchor in meshAnchors {
            guard let node = sceneView.node(for: anchor) else { continue }
            // scanGeometry already assigns node.geometry; no second assignment needed.
            _ = scanGeometry(frame: frame, anchor: anchor, node: node,
                             needTexture: needTexture, cameraImage: cameraImage)
        }
    }

    /// Converts the current frame's captured pixel buffer to a UIImage
    /// suitable for use as a diffuse texture.
    func captureCamera() -> UIImage? {
        guard let frame = sceneView.session.currentFrame else { return nil }
        let pixelBuffer = frame.capturedImage
        let image = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext(options: nil)
        guard let cameraImage = context.createCGImage(image, from: image.extent) else { return nil }
        return UIImage(cgImage: cameraImage)
    }
}
extension SCNGeometry {
    /// Serialises this geometry's vertex positions, normals and faces into a
    /// Wavefront OBJ fragment (`v`, `vn`, `f` records).
    /// NOTE(review): no `vt` texture coordinates or .mtl material reference are
    /// emitted, so colour/texture will not round-trip through this export.
    func toOBJ() -> String {
        var objString = ""
        for source in self.sources {
            if source.semantic == .vertex {
                for vertex in extractVector3s(from: source) {
                    objString += "v \(vertex.x) \(vertex.y) \(vertex.z)\n"
                }
            }
            if source.semantic == .normal {
                for normal in extractVector3s(from: source) {
                    objString += "vn \(normal.x) \(normal.y) \(normal.z)\n"
                }
            }
        }
        if let element = self.elements.first {
            for indexGroup in extractIndices(from: element) {
                objString += "f"
                for index in indexGroup {
                    // OBJ indices are 1-based.
                    objString += " \(index + 1)"
                }
                objString += "\n"
            }
        }
        return objString
    }

    /// Reads the packed 3-component float vectors out of a geometry source,
    /// honouring its stride and offset.
    private func extractVector3s(from source: SCNGeometrySource) -> [vector_float3] {
        let stride = source.dataStride
        let offset = source.dataOffset
        let data = source.data as NSData
        var result: [vector_float3] = []
        result.reserveCapacity(source.vectorCount)
        for i in 0..<source.vectorCount {
            let base = data.bytes.advanced(by: i * stride + offset)
            // Fix: read three scalar Floats rather than a vector_float3.
            // vector_float3 occupies 16 bytes, so loading it from tightly
            // packed 12-byte triples over-reads past the buffer on the last
            // element.
            let floats = base.assumingMemoryBound(to: Float.self)
            result.append(vector_float3(floats[0], floats[1], floats[2]))
        }
        return result
    }

    /// Kept for interface compatibility; delegates to `extractVector3s`.
    private func extractVertices(from source: SCNGeometrySource) -> [vector_float3] {
        return extractVector3s(from: source)
    }

    /// Kept for interface compatibility; delegates to `extractVector3s`.
    private func extractNormals(from source: SCNGeometrySource) -> [vector_float3] {
        return extractVector3s(from: source)
    }

    /// Decodes the element's index buffer into one `[Int]` per primitive.
    ///
    /// Fix: the original looped `0..<primitiveCount` *inside* each primitive
    /// and computed `byteOffset = (i * primitiveCount + j) * bytesPerIndex`,
    /// which both emitted the wrong face tuples and read far past the end of
    /// the buffer for any mesh with more than a handful of primitives. The
    /// correct inner bound is the number of indices per primitive (3 for
    /// triangles), and the offset is `(primitive * perPrimitive + j)`.
    private func extractIndices(from element: SCNGeometryElement) -> [[Int]] {
        let bytesPerIndex = element.bytesPerIndex
        // Indices consumed by each primitive, by primitive type.
        let perPrimitive: Int
        switch element.primitiveType {
        case .triangles:
            perPrimitive = 3
        case .line:
            perPrimitive = 2
        case .point:
            perPrimitive = 1
        default:
            // Triangle strips / polygons share indices between primitives and
            // cannot be exported with this simple per-primitive layout.
            return []
        }
        let data = element.data as NSData
        var result: [[Int]] = []
        result.reserveCapacity(element.primitiveCount)
        for primitive in 0..<element.primitiveCount {
            var indices: [Int] = []
            indices.reserveCapacity(perPrimitive)
            for j in 0..<perPrimitive {
                let byteOffset = (primitive * perPrimitive + j) * bytesPerIndex
                let index: Int
                if bytesPerIndex == MemoryLayout<UInt16>.size {
                    index = Int(data.bytes.advanced(by: byteOffset).assumingMemoryBound(to: UInt16.self).pointee)
                } else if bytesPerIndex == MemoryLayout<UInt32>.size {
                    index = Int(data.bytes.advanced(by: byteOffset).assumingMemoryBound(to: UInt32.self).pointee)
                } else {
                    // Unsupported index width: skip gracefully instead of
                    // crashing the whole export (was fatalError).
                    return []
                }
                indices.append(index)
            }
            result.append(indices)
        }
        return result
    }
}
I attempted to export the scanned 3D model to OBJ and USDZ files, including both color and texture. My expectation was that the exported files would display the complete model with its colors and textures. However, the issue I'm encountering is that the exported file only shows the texture and color information from the last frame of the scan, rather than the entire scanned object.
Upvotes: 0
Views: 39