render() method

Renders the given scene (or any Object3D) from the point of view of the supplied camera, drawing into the currently bound render target (or the canvas if none is set).

Implementation
void render(Object3D scene, Camera camera) {
  if (_isContextLost) return;

  // update scene graph
  if (scene.matrixWorldAutoUpdate) scene.updateMatrixWorld();

  // update camera matrices and frustum
  if (camera.parent == null && camera.matrixWorldAutoUpdate) camera.updateMatrixWorld();

  if (xr.enabled && xr.isPresenting) {
    if (xr.cameraAutoUpdate) xr.updateCamera(camera);
    if (kIsWeb) camera = xr.getCamera();
  }

  if (scene is Scene) {
    scene.onBeforeRender?.call(renderer: this, scene: scene, camera: camera, renderTarget: _currentRenderTarget);
  }
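
  // push a render state for this call; it is popped again at the end so render() calls can nest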
  currentRenderState = renderStates.get(scene, renderCallDepth: renderStateStack.length);
  currentRenderState!.init(camera);
  renderStateStack.add(currentRenderState!);

  projScreenMatrix.multiply2(camera.projectionMatrix, camera.matrixWorldInverse);
  _frustum.setFromMatrix(projScreenMatrix);

  _localClippingEnabled = localClippingEnabled;
  _clippingEnabled = clipping.init(clippingPlanes, _localClippingEnabled);

  currentRenderList = renderLists.get(scene, renderListStack.length);
  currentRenderList!.init();
  renderListStack.add(currentRenderList!);

  if (xr.enabled && xr.isPresenting) {
    final depthSensingMesh = xr.getDepthSensingMesh();
    if (depthSensingMesh != null) {
      projectObject(depthSensingMesh, camera, -double.maxFinite.toInt(), sortObjects);
    }
  }
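
  // project the scene graph into the current render list (opaque / transmissive / transparent buckets)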
  projectObject(scene, camera, 0, sortObjects);
  currentRenderList!.finish();

  if (sortObjects) {
    currentRenderList!.sort(_opaqueSort, _transparentSort);
  }

  renderBackground = !xr.enabled || !xr.isPresenting || !xr.hasDepthSensing();
  if (renderBackground) {
    background.addToRenderList(currentRenderList!, scene);
  }

  info.render['frame'] = info.render['frame']! + 1;

  if (_clippingEnabled) clipping.beginShadows();
  final shadowsArray = currentRenderState!.state.shadowsArray;
  if (kIsWeb) {
    shadowMap.render(shadowsArray, scene, camera);
  }
  if (_clippingEnabled) clipping.endShadows();

  if (info.autoReset) info.reset();

  // render scene
  final opaqueObjects = currentRenderList?.opaque;
  final transmissiveObjects = currentRenderList?.transmissive;
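
  // set up the lights collected for this render state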
  currentRenderState!.setupLights(physicallyCorrectLights);

  if (camera is ArrayCamera) {
    final cameras = camera.cameras;

    if (transmissiveObjects != null && transmissiveObjects.isNotEmpty) {
      for (int i = 0, l = cameras.length; i < l; i++) {
        final camera2 = cameras[i];
        renderTransmissionPass(opaqueObjects!, transmissiveObjects, scene, camera2);
      }
    }

    if (renderBackground) background.render(scene);

    for (int i = 0, l = cameras.length; i < l; i++) {
      final camera2 = cameras[i];
      renderScene(currentRenderList!, scene, camera2, camera2.viewport);
    }
  } else {
    if (renderBackground) background.render(scene);
    renderScene(currentRenderList!, scene, camera);
    if (transmissiveObjects != null && transmissiveObjects.isNotEmpty) {
      renderTransmissionPass(opaqueObjects!, transmissiveObjects, scene, camera);
    }
  }

  if (!kIsWeb) {
    shadowMap.render(shadowsArray, scene, camera);
  }

  if (_currentRenderTarget != null) {
    // resolve multisample renderbuffers to a single-sample texture if necessary
    textures.updateMultisampleRenderTarget(_currentRenderTarget!);

    // Generate mipmap if we're using any kind of mipmap filtering
    textures.updateRenderTargetMipmap(_currentRenderTarget!);
  }

  if (scene is Scene) {
    scene.onAfterRender?.call(renderer: this, scene: scene, camera: camera);
  }
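
  // flush pending GL commands and reset cached binding/material/camera state for the next call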
  _gl.flush();

  bindingStates.resetDefaultState();
  _currentMaterialId = -1;
  _currentCamera = null;
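
  // restore the previous render state and render list (relevant for nested render() calls)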
  renderStateStack.removeLast();
  if (renderStateStack.isNotEmpty) {
    currentRenderState = renderStateStack.last;
    if (_clippingEnabled) clipping.setGlobalState(clippingPlanes, currentRenderState!.state.camera!);
  } else {
    currentRenderState = null;
  }

  renderListStack.removeLast();
  if (renderListStack.isNotEmpty) {
    currentRenderList = renderListStack.last;
  } else {
    currentRenderList = null;
  }
}
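
For reference, a minimal sketch of how this method is normally driven, once per frame from the application's animation loop. The `renderer`, `scene`, `camera`, and `mesh` objects below are assumed to have been created elsewhere; the callback name and the `mesh` rotation are illustrative placeholders, not part of this API:

// Hypothetical per-frame callback: `dt` is the elapsed time in seconds.
void onFrame(double dt) {
  // mutate the scene as needed; world matrices are refreshed inside render()
  mesh.rotation.y += 0.5 * dt;

  // draw one frame of `scene` from the point of view of `camera`
  renderer.render(scene, camera);
}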