onAnimationFrame method

void onAnimationFrame(
  double time,
  XRFrame frame
)

Implementation

/// Per-frame WebXR render callback.
///
/// Queries the viewer pose for [frame], binds the appropriate XR
/// framebuffer, updates the per-eye cameras from the pose's views,
/// feeds the session's input sources to the controllers, invokes the
/// user animation callback, and finally resolves the multisampled
/// color buffer when MSAA is enabled.
///
/// [time] is the timestamp supplied by the XR system; [frame] is the
/// current frame. [xrFrame] is only valid for the duration of this
/// call and is cleared before returning.
void onAnimationFrame(double time, XRFrame frame ) {
  pose = frame.getViewerPose( customReferenceSpace ?? referenceSpace);
  // Expose the frame to any code that runs during this tick.
  xrFrame = frame;

  if ( pose != null ) {
    final views = pose!.views.toDart;
    if ( glBaseLayer != null ) {
      state.bindXRFramebuffer( Framebuffer(glBaseLayer!.framebuffer) );
    }

    bool cameraVRNeedsUpdate = false;

    // Rebuild cameraVR's camera list when the number of views changes
    // (e.g. entering or leaving stereo rendering).
    if ( views.length != cameraVR.cameras.length ) {
      cameraVR.cameras.length = 0;
      cameraVRNeedsUpdate = true;
    }

    for (int i = 0; i < views.length; i ++ ) {
      final view = views[ i ];
      XRViewport? viewport;

      if ( glBaseLayer != null ) {
        // XRWebGLLayer path: the layer supplies the viewport directly.
        viewport = glBaseLayer?.getViewport( view );
      }
      else {
        // WebXR Layers path: attach this view's sub-image textures to our
        // own framebuffer before rendering.
        final glSubImage = glBinding?.getViewSubImage( glProjLayer!, view );
        state.bindXRFramebuffer( glFramebuffer );

        if ( glSubImage?.depthStencilTexture != null ) {
          gl.framebufferTexture2D(
            WebGL.FRAMEBUFFER,
            depthStyle,
            WebGL.TEXTURE_2D,
            WebGLTexture(glSubImage!.depthStencilTexture),
            0
          );
        }

        gl.framebufferTexture2D(
          WebGL.FRAMEBUFFER,
          WebGL.COLOR_ATTACHMENT0,
          WebGL.TEXTURE_2D,
          WebGLTexture(glSubImage!.colorTexture),
          0
        );
        viewport = glSubImage.viewport;
      }

      final camera = cameras[ i ];
      camera.matrix.copyFromUnknown( view.transform.matrix.dartify());
      camera.projectionMatrix.copyFromUnknown( view.projectionMatrix.dartify()  );
      camera.viewport?.setValues( viewport!.x, viewport.y, viewport.width, viewport.height);

      // The first view's transform is used as the combined camera's pose.
      if ( i == 0 ) {
        cameraVR.matrix.setFrom( camera.matrix );
      }

      if ( cameraVRNeedsUpdate == true ) {
        cameraVR.cameras.add( camera );
      }
    }

    if ( isMultisample ) {
      state.bindXRFramebuffer( glMultisampledFramebuffer );
      if ( clearStyle != 0) gl.clear( clearStyle );
    }
  }

  // Update controllers from the session's current input sources.
  final inputSources = session!.inputSources!.toDart;

  for (int i = 0; i < controllers.length; i ++ ) {
    final controller = controllers[ i ];
    // FIX: guard by index, not just isNotEmpty — the old check still
    // indexed out of range whenever controllers outnumber input sources.
    final inputSource = i < inputSources.length ? inputSources[ i ] : null;
    if ( inputSource != null) {
      controller.update( inputSource, frame, customReferenceSpace ?? referenceSpace );
    }
  }

  if ( onAnimationFrameCallback != null) onAnimationFrameCallback( time, frame );

  if ( isMultisample ) {
    // Resolve the multisampled color buffer into the single-sampled
    // framebuffer via blit, invalidating attachments we no longer need
    // to avoid flushing them to main memory.
    final width = glProjLayer!.textureWidth;
    final height = glProjLayer!.textureHeight;

    state.bindFramebuffer( WebGL.READ_FRAMEBUFFER, glMultisampledFramebuffer );
    state.bindFramebuffer( WebGL.DRAW_FRAMEBUFFER, glFramebuffer );
    // Invalidate the depth here to avoid flush of the depth data to main memory.
    gl.invalidateFramebuffer( WebGL.READ_FRAMEBUFFER, [ depthStyle ] );
    gl.invalidateFramebuffer( WebGL.DRAW_FRAMEBUFFER, [ depthStyle ] );
    gl.blitFramebuffer( 0, 0, width, height, 0, 0, width, height, WebGL.COLOR_BUFFER_BIT, WebGL.NEAREST );
    // Invalidate the MSAA buffer because it's not needed anymore.
    gl.invalidateFramebuffer( WebGL.READ_FRAMEBUFFER, [ WebGL.COLOR_ATTACHMENT0 ] );
    state.bindFramebuffer( WebGL.READ_FRAMEBUFFER, null );
    state.bindFramebuffer( WebGL.DRAW_FRAMEBUFFER, null );

    state.bindFramebuffer( WebGL.FRAMEBUFFER, glMultisampledFramebuffer );
  }

  // The XR frame is only valid inside this callback.
  xrFrame = null;
}