Reputation: 8607
I'm adding some visual elements to my app with SceneKit shader modifiers like this:
// A SceneKit scene with orthographic projection
// Load the fragment shader modifier's source from the bundle that contains this class.
let shaderBundle = Bundle(for: Self.self)
// Force-unwraps are deliberate: a missing bundled shader resource is a programmer error
// that should crash immediately during development.
let shaderUrl = shaderBundle.url(forResource: "MyShader.frag", withExtension: nil)!
let shaderString = try! String(contentsOf: shaderUrl)
let plane = SCNPlane(width: 512, height: 512) // 1024x1024 pixels on devices with x2 screen resolution
// Attach the source as a fragment-entry-point shader modifier, so SceneKit runs it
// for every rasterized fragment of the plane.
plane.firstMaterial!.shaderModifiers = [SCNShaderModifierEntryPoint.fragment: shaderString]
let planeNode = SCNNode(geometry: plane)
// NOTE(review): `rootNode` comes from the enclosing scene context — this snippet
// presumably runs inside an SCNScene subclass or with the scene's root in scope.
rootNode.addChildNode(planeNode)
The problem is slow performance, because SceneKit painstakingly renders every single pixel of the plane that's displaying the shader. How do I decrease the rendering resolution of the shader while keeping the plane's on-screen size unchanged?
I've already tried making plane
smaller and using an enlarging scale transformation on planeNode
but to no avail — the shader was still rendered in full detail, exactly as before.
Using plane.firstMaterial!.diffuse.contentsTransform
didn't help either (or maybe I was doing it wrong).
I know I could make the global SCNView
smaller and then apply an affine scale transform if that shader was the only node in the scene but it's not, there are other nodes (that aren't shaders) in the same scene and I'd prefer to avoid altering their appearance in any way.
Upvotes: 3
Views: 843
Reputation: 8607
Seems like I managed to solve it using a sort of "render to texture" approach by nesting a SceneKit scene inside a SpriteKit scene being displayed by the top level SceneKit scene.
Going into more detail, the following subclass of SCNNode
is placing a downscaled shader plane within a SpriteKit SK3DNode
, then taking that SK3DNode
and putting it inside a SpriteKit scene (an SKScene)
, and then using that SKScene
as the diffuse contents of an upscaled plane put inside the top level SceneKit scene.
Strangely, for keeping the native resolution I need to use scaleFactor*2
, so for halving the rendering resolution (normally scale factor 0.5) I actually need to use scaleFactor = 1
.
If anyone happens to know the reason for this strange behavior or a workaround for it, please let me know in a comment.
import Foundation
import SceneKit
import SpriteKit
// An SCNNode that renders a fragment shader modifier at a reduced resolution while
// keeping its on-screen size, via a render-to-texture chain:
// downscaled shader plane -> nested SCNScene -> SK3DNode -> SKScene ->
// diffuse contents of an upscaled plane placed in the host scene.
class ScaledResolutionFragmentShaderModifierPlaneNode: SCNNode {
    // Depth of the nested scene's orthographic frustum; the camera sits halfway
    // along it, so the shader plane at z = 0 lies safely inside the frustum.
    private static let nestedSCNSceneFrustumLength: CGFloat = 8

    // The material carrying the shader modifier, exposed so callers can feed
    // shader parameter inputs (e.g. with setValue(_:forKey:)).
    let shaderPlaneMaterial: SCNMaterial

    /// Creates a node that runs `shaderModifier` at a scaled-down resolution.
    ///
    /// - Parameters:
    ///   - shaderModifier: Source of the fragment shader modifier.
    ///   - planeSize: The size of the shader plane on the screen.
    ///   - scaleFactor: The scale used for the shader's rendering resolution;
    ///     must be greater than zero. The lower the value, the faster the rendering.
    init(shaderModifier: String, planeSize: CGSize, scaleFactor: CGFloat) {
        // A zero or negative factor would produce a degenerate viewport and a
        // division by zero below — fail fast instead.
        precondition(scaleFactor > 0, "scaleFactor must be greater than zero")
        let scaledSize = CGSize(width: planeSize.width*scaleFactor, height: planeSize.height*scaleFactor)

        // Nested SceneKit scene with an orthographic projection that exactly
        // frames the downscaled shader plane (orthographicScale is half the
        // visible height).
        let nestedSCNScene = SCNScene()
        let camera = SCNCamera()
        camera.zFar = Double(Self.nestedSCNSceneFrustumLength)
        camera.usesOrthographicProjection = true
        camera.orthographicScale = Double(scaledSize.height/2)
        let cameraNode = SCNNode()
        cameraNode.camera = camera
        cameraNode.simdPosition = simd_float3(x: 0, y: 0, z: Float(Self.nestedSCNSceneFrustumLength/2))
        nestedSCNScene.rootNode.addChildNode(cameraNode)

        // The plane that actually runs the shader, at the reduced size.
        let shaderPlane = SCNPlane(width: scaledSize.width, height: scaledSize.height)
        shaderPlaneMaterial = shaderPlane.firstMaterial!
        shaderPlaneMaterial.shaderModifiers = [SCNShaderModifierEntryPoint.fragment: shaderModifier]
        let shaderPlaneNode = SCNNode(geometry: shaderPlane)
        nestedSCNScene.rootNode.addChildNode(shaderPlaneNode)

        // Intermediary SpriteKit scene hosting the nested SceneKit scene. The
        // SK3DNode is centered manually because SpriteKit's origin is the
        // scene's bottom-left corner.
        let nestedSCNSceneSKNode = SK3DNode(viewportSize: scaledSize)
        nestedSCNSceneSKNode.scnScene = nestedSCNScene
        nestedSCNSceneSKNode.position = CGPoint(x: scaledSize.width/2, y: scaledSize.height/2)
        nestedSCNSceneSKNode.isPlaying = true
        let intermediarySKScene = SKScene(size: scaledSize)
        intermediarySKScene.backgroundColor = .clear
        intermediarySKScene.addChild(nestedSCNSceneSKNode)

        // Plane in the host scene showing the SpriteKit scene as its diffuse
        // contents; scaling it back up restores the requested on-screen size
        // while keeping the low rendering resolution.
        let intermediarySKScenePlane = SCNPlane(width: scaledSize.width, height: scaledSize.height)
        intermediarySKScenePlane.firstMaterial!.diffuse.contents = intermediarySKScene
        let intermediarySKScenePlaneNode = SCNNode(geometry: intermediarySKScenePlane)
        let invScaleFactor = 1/Float(scaleFactor)
        intermediarySKScenePlaneNode.simdScale = simd_float3(x: invScaleFactor, y: invScaleFactor, z: 1)

        super.init()
        addChildNode(intermediarySKScenePlaneNode)
    }

    required init?(coder: NSCoder) {
        // This node is only meant to be constructed programmatically.
        fatalError("init(coder:) has not been implemented")
    }
}
Upvotes: 2
Reputation: 57149
In general, without a fairly new GPU feature called variable rasterization rate in Metal or variable rate shading elsewhere, you can’t make one object in a scene run its fragment shader at a different resolution than the rest of the scene.
For this case, depending on what your setup is, you might be able to use SCNTechnique to render the plane in a separate pass at a different resolution, then composite that back into your scene, in the same way some game engines render particles at a lower resolution to save on fill rate. Here’s an example.
First, you’ll need a Metal file in your project (if you already have one, just add to it), containing the following:
#include <SceneKit/scn_metal>
// Vertex input for the full-screen compositing quad, bound to SceneKit's
// standard position and first-texture-coordinate vertex semantics.
struct QuadVertexIn {
    float3 position [[ attribute(SCNVertexSemanticPosition) ]];
    float2 uv [[ attribute(SCNVertexSemanticTexcoord0) ]];
};
// Rasterizer output of the quad vertex stage: clip-space position plus the
// interpolated texture coordinate used to sample the low-res target.
struct QuadVertexOut {
    float4 position [[ position ]];
    float2 uv;
};
// Pass-through vertex stage for the DRAW_QUAD compositing pass: pins the quad
// at the far plane (z = 1, w = 1) and negates Y to map the quad's vertex
// coordinates into Metal's NDC orientation.
vertex QuadVertexOut quadVertex(QuadVertexIn in [[ stage_in ]]) {
    QuadVertexOut out;
    out.position = float4(in.position.x, -in.position.y, 1.0, 1.0);
    out.uv = in.uv;
    return out;
}
// Linear-filtered, edge-clamped sampler used to upscale the low-res render
// target; switch filter::linear to filter::nearest for sharp pixelation.
constexpr sampler compositingSampler(coord::normalized, address::clamp_to_edge, filter::linear);
// Compositing fragment stage: samples the low-resolution offscreen target
// (bound at texture index 0 via the technique's "inputs" dictionary) at the
// quad's interpolated UV. Alpha blending is configured by the technique pass.
fragment half4 compositeFragment(QuadVertexOut v [[ stage_in ]], texture2d<half, access::sample> compositeInput [[ texture(0) ]]) {
    return compositeInput.sample(compositingSampler, v.uv);
}
Then, in your SceneKit code, you can set up and apply the technique like this:
// A three-pass SCNTechnique: (1) render only the nodes flagged for the
// low-res category into a downscaled offscreen color target, (2) render the
// rest of the scene normally, (3) composite the low-res target back over the
// scene with a full-screen quad drawn by the Metal shaders above.
let technique = SCNTechnique(dictionary: [
"passes": ["drawLowResStuff":
["draw": "DRAW_SCENE",
// only draw nodes that are in this category
"includeCategoryMask": 2,
"colorStates": ["clear": true, "clearColor": "0.0"],
"outputs": ["color": "lowResStuff"]],
"drawScene":
["draw": "DRAW_SCENE",
// don’t draw nodes that are in the low-res-stuff category
"excludeCategoryMask": 2,
"colorStates": ["clear": true, "clearColor": "sceneBackground"],
"outputs": ["color": "COLOR"]],
"composite":
["draw": "DRAW_QUAD",
"metalVertexShader": "quadVertex",
"metalFragmentShader": "compositeFragment",
// don’t clear what’s currently there (the rest of the scene)
"colorStates": ["clear": false],
// use alpha blending
"blendStates": ["enable": true, "colorSrc": "srcAlpha", "colorDst": "oneMinusSrcAlpha"],
// supply the lowResStuff render target to the fragment shader
"inputs": ["compositeInput": "lowResStuff"],
// draw into the main color render target
"outputs": ["color": "COLOR"]]
],
// the passes run in this order every frame
"sequence": ["drawLowResStuff", "drawScene", "composite"],
// "scaleFactor" sizes the offscreen target relative to the viewport:
// 0.5 renders the flagged nodes at half resolution in each dimension
"targets": ["lowResStuff": ["type": "color", "scaleFactor": 0.5]]
])
// mark the plane node as belonging to the category of stuff that gets drawn in the low-res pass
myPlaneNode.categoryBitMask = 2
// apply the technique to the scene view
mySceneView.technique = technique
With a test scene consisting of two spheres with the same texture, and the scaleFactor
set to 0.25 instead of 0.5 to exaggerate the effect, the result looks like this.
If you’d prefer sharp pixelation instead of the blurrier resizing depicted above, change filter::linear
to filter::nearest
in the Metal code. Also, note that the low-res content being composited in is not taking into account the depth buffer, so if your plane is supposed to appear “behind” other objects then you’ll have to do some more work in the compositing function to fix that.
Upvotes: 1