Reputation: 51
I use this code to add a picture texture in RealityKit, and it works fine:
// Create a material and assign a still-image texture from the app bundle.
var material = SimpleMaterial()
// NOTE(review): `try!` traps at runtime if "image.jpg" is missing from the bundle.
material.baseColor = try! .texture(.load(named: "image.jpg"))
I tried to use this code to add a video file as a texture, but it crashes:
// Locate the bundled video file; bail out if it isn't in the bundle.
guard let url = Bundle.main.url(forResource: "data", withExtension: "mp4") else {
return
}
// CRASHES: TextureResource loading only accepts image data, not video files,
// so the forced `try!` traps at runtime.
material.baseColor = try! .texture(.load(contentsOf: url))
How can I add a video file?
Upvotes: 5
Views: 2544
Reputation: 58043
You can use VideoMaterial in RealityKit 2.0 and higher. RealityKit 1.0 doesn't support video materials. Here's code showing you how to apply a video material:
Approach A
import SwiftUI
import RealityKit
import AVFoundation
/// Wraps a RealityKit `ARView` for SwiftUI and shows a bundled video
/// on a 16:9 plane using RealityKit 2.0's `VideoMaterial`.
struct ARViewContainer : UIViewRepresentable {
    let arView = ARView(frame: .zero)
    let anchor = AnchorEntity()

    func makeUIView(context: Context) -> ARView {
        // Push the anchor 1 m along +Z so the plane sits in view.
        anchor.position.z += 1.0
        self.loadVideoMaterial()
        return arView
    }

    /// Loads "video.mp4" from the main bundle, builds a plane model with a
    /// `VideoMaterial` driven by an `AVPlayer`, anchors it, and starts playback.
    func loadVideoMaterial() {
        // url(forResource:withExtension:) avoids the path-string round-trip of
        // path(forResource:ofType:) + URL(fileURLWithPath:).
        guard let videoURL = Bundle.main.url(forResource: "video",
                                             withExtension: "mp4")
        else { return }
        let avPlayer = AVPlayer(url: videoURL)

        // Plane sized for 16:9 footage (1.92 m × 1.08 m).
        let mesh = MeshResource.generatePlane(width: 1.92, height: 1.08)
        let material = VideoMaterial(avPlayer: avPlayer)
        let planeModel = ModelEntity(mesh: mesh, materials: [material])
        anchor.addChild(planeModel)
        arView.scene.anchors.append(anchor)

        avPlayer.volume = 0.05
        avPlayer.play()
    }

    func updateUIView(_ view: ARView, context: Context) { }
}
/// Root view: hosts the AR container, extended under the safe-area insets.
struct ContentView : View {
    var body: some View {
        ARViewContainer()
            .ignoresSafeArea()
    }
}
Also, you can apply RealityKit's VideoMaterial this way:
Approach B
// AVPLAYER and PlayerItem
// Guard instead of force-unwrapping `url!` — a missing resource now fails
// gracefully rather than crashing (the exact failure mode in the question).
guard let url = Bundle.main.url(forResource: "video", withExtension: "mp4")
else { return }
let asset = AVAsset(url: url)
let playerItem = AVPlayerItem(asset: asset)
let avPlayer = AVPlayer()

// ENTITY — a unit sphere textured with the video
let mesh = MeshResource.generateSphere(radius: 1)
let material = VideoMaterial(avPlayer: avPlayer)
let entity = ModelEntity(mesh: mesh, materials: [material])

// ANCHOR — placed 10 m in front of the world origin
let anchor = AnchorEntity(world: [0,0,-10])
anchor.addChild(entity)
arView.scene.anchors.append(anchor)

// PLAYBACK
avPlayer.replaceCurrentItem(with: playerItem)
avPlayer.play()
In visionOS, the VideoPlayerComponent
is another way to create a video scene (including for HEVC video with transparency).
import SwiftUI
import AVKit
import RealityKit
/// visionOS example: renders a bundled movie with `VideoPlayerComponent`
/// (supports HEVC video with transparency).
struct ContentView: View {
    // Player kept in view state so playback survives view updates.
    @State var player: AVPlayer?
    let screen = Entity()

    var body: some View {
        RealityView { content in
            // Guard instead of force-unwrapping the bundle lookup: a missing
            // resource leaves the scene empty rather than crashing.
            guard let url = Bundle.main.url(forResource: "puppets",
                                            withExtension: "mov")
            else { return }
            let avPlayer = AVPlayer(url: url)
            player = avPlayer
            // Attach the video output to the entity via VideoPlayerComponent.
            screen.components[VideoPlayerComponent.self] = .init(avPlayer: avPlayer)

            let anchor = AnchorEntity()
            anchor.addChild(screen)
            content.add(anchor)

            avPlayer.play()
        }
    }
}

#Preview {
    ContentView()
}
import SwiftUI
import SceneKit
import AVFoundation
/// SceneKit alternative: wraps an `SCNView` for SwiftUI and shows a bundled
/// video on a 16:9 `SCNPlane`. SceneKit accepts an `AVPlayer` directly as a
/// material's diffuse contents.
struct VRViewContainer : UIViewRepresentable {
    let sceneView = SCNView(frame: .zero)

    func makeUIView(context: Context) -> SCNView {
        sceneView.scene = SCNScene()
        sceneView.backgroundColor = .black
        sceneView.pointOfView?.position.z += 0.5
        // Keep the render loop alive so the AVPlayer texture keeps updating.
        sceneView.isPlaying = true
        self.loadVideoMaterial()
        return sceneView
    }

    /// Loads "video.mp4" from the main bundle, builds a plane node whose
    /// material plays the video, adds it to the scene, and starts playback.
    func loadVideoMaterial() {
        // url(forResource:withExtension:) avoids the path-string round-trip of
        // path(forResource:ofType:) + URL(fileURLWithPath:).
        guard let videoURL = Bundle.main.url(forResource: "video",
                                             withExtension: "mp4")
        else { return }
        let avPlayer = AVPlayer(url: videoURL)

        // Plane sized for 16:9 footage (1.92 m × 1.08 m).
        let material = SCNMaterial()
        material.diffuse.contents = avPlayer
        let mesh = SCNPlane(width: 1.92, height: 1.08)
        mesh.materials[0] = material
        let planeModel = SCNNode(geometry: mesh)
        sceneView.scene?.rootNode.addChildNode(planeModel)

        avPlayer.volume = 0.05
        avPlayer.play()
    }

    func updateUIView(_ view: SCNView, context: Context) { }
}
/// Root view for the SceneKit variant, extended under the safe-area insets.
struct ContentView : View {
    var body: some View {
        VRViewContainer()
            .ignoresSafeArea()
    }
}
Upvotes: 8
Reputation: 209
I found the workaround, such as below code
// Workaround: overlay a transparent SCNView on top of the ARView so SceneKit
// (which supports AVPlayer-backed materials) renders the video.
// NOTE(review): `arView` and `scnView` are properties declared elsewhere.
self.arView = arView
let scene = SCNScene()
scnView = SCNView(frame: arView.frame)
scnView?.scene = scene
// Clear backgrounds let the ARView's camera feed show through the overlay.
scnView?.backgroundColor = UIColor.clear
scnView?.scene?.background.contents = UIColor.clear
Then add SCN camera and set camera transform from ARFrame, such as:
// Rotate the ARKit camera transform 90° about Z — presumably to compensate
// for the portrait interface orientation; TODO confirm against the app's
// supported orientations.
let rotation = SCNMatrix4MakeRotation(.pi / 2.0, 0, 0, 1)
let cameraTransform = simd_mul(frame.camera.transform, simd_float4x4(rotation))
// Build a SceneKit projection matrix from ARKit's, for the same viewport.
// NOTE(review): `viewBounds!` traps if the bounds were never set.
let projectionMatrix = SCNMatrix4(frame.camera.projectionMatrix(for: .portrait,
viewportSize: self.viewBounds!.size,
zNear: 0.001,
zFar: 10000) )
// Drive the SceneKit camera node from the ARKit frame each update.
self.arCameraNode?.simdTransform = cameraTransform
self.arCameraNode?.camera?.projectionTransform = projectionMatrix
Finally, add your SCN video node into rootNode
However, there is a little shifting; I think you can wait for RealityKit to support video materials.
Upvotes: 2
Reputation: 9
I might be wrong, but currently RealityKit does not support videos. A video is not a normal texture; it is a sequence of frames rendered as an animated texture.
Upvotes: 0