Reputation:
I have a problem with recognizing an image. As you can see from the video I have attached, when I frame the desired picture it shows me the image in black and white.
This is the code I am using, my aim is to recognize the image of a particular painting and to see it in colors in AR.
PS: the image for recognition is uploaded to the color assets in PNG format.
import UIKit
import SceneKit
import ARKit
class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {

    // MARK: - IBOutlets

    @IBOutlet weak var sessionInfoLabel: UILabel!
    @IBOutlet weak var sceneView: ARSCNView!
    @IBOutlet weak private var messagePanel: UIVisualEffectView!
    @IBOutlet weak private var messageLabel: UILabel!

    // MARK: - Properties

    /// Convenience accessor for the scene view's AR session.
    var session: ARSession {
        return sceneView.session
    }

    /// Categories of scheduled user-facing messages; each owns one timer slot.
    enum MessageType {
        case trackingStateEscalation
        case contentPlacement

        static var all: [MessageType] = [
            .trackingStateEscalation,
            .contentPlacement
        ]
    }

    /// Pending message timer per type (see `scheduleMessage(_:inSeconds:messageType:)`).
    private var timers: [MessageType: Timer] = [:]
    /// Timer that auto-hides the currently visible message.
    private var messageHideTimer: Timer?
    /// How long an auto-hiding message stays on screen, in seconds.
    private let displayDuration: TimeInterval = 6

    /// Serial queue for SceneKit scene-graph mutations so they never race with
    /// the renderer's own updates.
    let updateQueue = DispatchQueue(label: Bundle.main.bundleIdentifier! +
        ".serialSceneKitQueue")

    /// Fade-out sequence applied to the overlay plane after placement.
    /// NOTE(review): the 100.25 s wait and the three consecutive fades to the
    /// same opacity are kept from the original; the overlay stays visible for
    /// over 100 seconds before fading — confirm this is intentional.
    var imageHighlightAction: SCNAction {
        return .sequence([
            .wait(duration: 100.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOut(duration: 0.5),
            .removeFromParentNode()
        ])
    }

    // MARK: - View Life Cycle

    /// - Tag: StartARSession
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        guard ARWorldTrackingConfiguration.isSupported else {
            fatalError("""
            ARKit is not available on this device. For apps that require ARKit
            for core functionality, use the `arkit` key in the key in the
            `UIRequiredDeviceCapabilities` section of the Info.plist to prevent
            the app from installing. (If the app can't be installed, this error
            can't be triggered in a production scenario.)
            In apps where AR is an additive feature, use `isSupported` to
            determine whether to show UI for launching AR experiences.
            """) // For details, see https://developer.apple.com/documentation/arkit
        }

        addScanningOverlay()

        // Set a delegate to track the number of plane anchors for providing UI feedback.
        sceneView.session.delegate = self

        // Prevent the screen from being dimmed after a while as users will likely
        // have long periods of interaction without touching the screen or buttons.
        UIApplication.shared.isIdleTimerDisabled = true

        // Show debug UI to view performance metrics (e.g. frames per second).
        sceneView.showsStatistics = true

        // Bug fix: the original ran an interim plane-detection-only configuration
        // here and then immediately replaced it inside `resetTracking()` with a
        // configuration that had no `planeDetection` at all. `resetTracking()`
        // now enables both features, so a single run is sufficient.
        resetTracking()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the view's AR session.
        sceneView.session.pause()
    }

    /// Builds the translucent "SCAN A SURFACE" hint panel over the scene view.
    private func addScanningOverlay() {
        let scanningPanel = UIImageView()
        scanningPanel.backgroundColor = UIColor(white: 0.33, alpha: 0.6)
        scanningPanel.layer.masksToBounds = true
        scanningPanel.frame = CGRect(x: -2, y: sceneView.frame.height - 270, width: 178, height: 50)
        scanningPanel.layer.cornerRadius = 10

        let scanInfo = UILabel(frame: CGRect(x: 8, y: sceneView.frame.height - 268, width: 160, height: 45))
        scanInfo.textAlignment = .left
        scanInfo.font = scanInfo.font.withSize(15)
        scanInfo.textColor = UIColor.white
        scanInfo.text = "SCAN A SURFACE"

        sceneView.addSubview(scanningPanel)
        sceneView.addSubview(scanInfo)
    }

    // MARK: - User messaging

    /// Cancels (and forgets) the pending timer for `messageType`, if any.
    func cancelScheduledMessage(for messageType: MessageType) {
        timers[messageType]?.invalidate()
        timers[messageType] = nil
    }

    /// Shows or hides the message panel, optionally animating the opacity.
    private func setMessageHidden(_ hide: Bool, animated: Bool) {
        // The panel starts out hidden, so show it before animating opacity.
        messagePanel.isHidden = false
        guard animated else {
            messagePanel.alpha = hide ? 0 : 1
            return
        }
        UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
            self.messagePanel.alpha = hide ? 0 : 1
        }, completion: nil)
    }

    /// Displays `text` in the message panel; auto-hides after `displayDuration`
    /// unless `autoHide` is false.
    func showMessage(_ text: String, autoHide: Bool = true) {
        // Cancel any previous hide timer.
        messageHideTimer?.invalidate()
        messageLabel.text = text
        // Make sure status is showing.
        setMessageHidden(false, animated: true)
        if autoHide {
            messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
                self?.setMessageHidden(true, animated: true)
            })
        }
    }

    /// Schedules `text` to appear after `seconds`, replacing any message
    /// previously scheduled for the same `messageType`.
    func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
        cancelScheduledMessage(for: messageType)
        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
            self?.showMessage(text)
            timer.invalidate()
        })
        timers[messageType] = timer
    }

    // MARK: - Session configuration

    /// Restarts the AR session with image detection and plane detection enabled.
    func resetTracking() {
        guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing expected asset catalog resources.")
        }
        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        // Bug fix: the original configuration here omitted `planeDetection`, so
        // after `resetTracking()` ran no ARPlaneAnchor could ever be delivered
        // and `renderer(_:didUpdate:for:)` below was unreachable.
        configuration.planeDetection = [.horizontal, .vertical]
        session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
        //statusViewController.scheduleMessage("Look around to detect images", inSeconds: 7.5, messageType: .contentPlacement)
    }

    /// NOTE(review): appears to be an unused, misspelled duplicate of
    /// `resetTracking()` (no image detection, horizontal planes only). Kept so
    /// any external caller keeps compiling; consider deleting it.
    func resetTrackin() {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }

    // MARK: - ARSCNViewDelegate

    /// - Tag: PlaceARContent
    ///
    /// Bug fix: in the original, this method's closing brace was missing, so
    /// every method below it (`renderer(_:didUpdate:for:)`, the whole
    /// ARSessionDelegate section, `updateSessionInfoLabel`, `resetTrackin`) was
    /// compiled as a *local function nested inside this one* and was therefore
    /// never invoked as a delegate callback. They are now class-level methods.
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // Place content only for anchors found by image detection.
        guard let imageAnchor = anchor as? ARImageAnchor else { return }
        let referenceImage = imageAnchor.referenceImage
        updateQueue.async {
            // Plane matching the physical size of the detected image.
            let plane = SCNPlane(width: referenceImage.physicalSize.width,
                                 height: referenceImage.physicalSize.height)
            let planeNode = SCNNode(geometry: plane)
            // NOTE(review): if "Crocefissione" names the ARReferenceImage asset
            // itself, the overlay will reportedly render in grayscale (Xcode
            // stores detection images desaturated). Add a separate full-colour
            // copy of the picture to the asset catalogue (e.g.
            // "ColourCrocefissione") and load that here instead.
            // (The original set the same material's diffuse twice — once via
            // `planeNode.geometry` and once via `plane`; they are the same
            // object, so a single assignment suffices.)
            plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")
            // `SCNPlane` is vertically oriented in its local coordinate space,
            // so rotate it to lie flat against the detected image.
            planeNode.eulerAngles.x = -.pi / 2
            planeNode.runAction(self.imageHighlightAction)
            // Add to the ARKit-managed node so the overlay tracks anchor updates.
            node.addChildNode(planeNode)
        }
    }

    /// - Tag: UpdateARContent
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        // Update content only for plane anchors and nodes matching the setup
        // created in `renderer(_:didAdd:for:)`.
        guard let planeAnchor = anchor as? ARPlaneAnchor,
              let planeNode = node.childNodes.first,
              let plane = planeNode.geometry as? SCNPlane
        else { return }
        // Plane estimation may shift the center of a plane relative to its
        // anchor's transform. (`float3` is deprecated; `SIMD3<Float>` is its
        // modern spelling and is bit-identical.)
        planeNode.simdPosition = SIMD3<Float>(planeAnchor.center.x, 0, planeAnchor.center.z)
        // Plane estimation may also extend planes, or remove one plane to merge
        // its extent into another.
        plane.width = CGFloat(planeAnchor.extent.x)
        plane.height = CGFloat(planeAnchor.extent.z)
    }

    // MARK: - ARSessionDelegate

    func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
        guard let frame = session.currentFrame else { return }
        updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
    }

    func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
        guard let frame = session.currentFrame else { return }
        updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
    }

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        // Only update when a frame is available; the original force-unwrapped
        // `session.currentFrame` here, which crashes early in the session.
        guard let frame = session.currentFrame else { return }
        updateSessionInfoLabel(for: frame, trackingState: camera.trackingState)
    }

    // MARK: - ARSessionObserver

    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay.
        sessionInfoLabel.text = "Session was interrupted"
    }

    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required.
        sessionInfoLabel.text = "Session interruption ended"
        resetTracking()
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user.
        sessionInfoLabel.text = "Session failed: \(error.localizedDescription)"
        resetTracking()
    }

    // MARK: - Private methods

    /// Updates the info label with feedback about the current tracking state.
    /// Bug fix: the original computed `message` and then discarded it; it is
    /// now actually written to `sessionInfoLabel`.
    func updateSessionInfoLabel(for frame: ARFrame, trackingState: ARCamera.TrackingState) {
        let message: String
        switch trackingState {
        case .normal where frame.anchors.isEmpty:
            // No planes detected; provide instructions for this app's AR interactions.
            message = "Move the device around to detect horizontal surfaces."
        case .notAvailable:
            message = "Tracking unavailable."
        case .limited(.excessiveMotion):
            message = "Tracking limited - Move the device more slowly."
        case .limited(.insufficientFeatures):
            message = "Tracking limited - Point the device at an area with visible surface detail, or improve lighting conditions."
        case .limited(.initializing):
            message = "Initializing AR session."
        default:
            // No feedback needed when tracking is normal and planes are visible.
            // (Nor when in unreachable limited-tracking states.)
            message = ""
        }
        sessionInfoLabel.text = message
    }
}
Upvotes: 2
Views: 1195
Reputation: 7385
I appreciate this is very late, and I am sure that now you have solved the issue, but nonetheless, when answering another question Get ARReferenceImage, I experienced the same issue as you.
Looking at your code, I assume that when you set the following:
plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")
That Crocefissione
refers to your actual ARReferenceImage
?
If so, then I believe that some magic happens behind the scenes: Xcode converts these images to grayscale, which appears to be how ARKit
is able to detect an ARReferenceImage
.
If I am wrong, please let me know ^_______^.
Anyway, a possible solution therefore is to do something like this.
Copy each of your ARReferenceImages
into the Assets Catalogue
and give it a prefix
e.g. Colour(yourImageName):
Then you would need to change the function slightly to display the correctly coloured image e.g:
plane.firstMaterial?.diffuse.contents = UIImage(named: "ColourCrocefissione")
Upvotes: 1