brianzinn / react-babylonjs

React for Babylon 3D engine
https://brianzinn.github.io/react-babylonjs/
818 stars 105 forks source link

How do I pass a camera into a postProcess? #176

Closed tristanbuckner closed 2 years ago

tristanbuckner commented 2 years ago

I'm attempting to refactor a working babylonjs playground into react so I can use it with an existing project, but I'm running into some problems with getting everything to line up with the declarative style.

So I have a fragment shader added to Effect.ShadersStore and I'm trying to apply it as a post processing step but this requires a camera to be passed in. How can I get "camera1" wired in correctly (I'm a react novice and don't know if I should be calling these components or what)?

          <Engine antialias={true} adaptToDeviceRatio={true} canvasId="sample-canvas">
            <Scene>
              <freeCamera name={'camera1'} position={new Vector3(0, 0, 10)} target={Vector3.Zero()}/>
              <hemisphericLight name='light' direction={new Vector3(0, 0, -1)} intensity={1}  />
              <postProcess name={'CRTShaderPostProcess'} fragmentUrl={'crt'} parameters={["curvature", "screenResolution", "scanLineOpacity"]} samplers={[]} options={0.25} />
              <plane name="screen" width={16} height={9} position={new Vector3(0, 0, 0)} >
              </plane>
            </Scene>
          </Engine>
brianzinn commented 2 years ago

hi @tristanbuckner - What's the imperative code to pass it through? It's not uncommon for this library to need extending to support new use cases, so it's often easier to work backwards from the imperative code to arrive at the declarative solution — also because not all properties are supported by the renderer.

tristanbuckner commented 2 years ago

Gotcha. So this is the product of me hacking together two babylonjs demos, one that applies a crt filter to a scene and one that plays streaming video as a texture on a plane.

// Inject hls.js from the CDN so the HLS playback shim is available at runtime.
// NOTE: `s` is referenced later (s.onload inside createScene), so its name stays.
const url = "https://cdn.jsdelivr.net/npm/hls.js@latest";
const s = document.createElement("script");
s.src = url;
s.type = "text/javascript";
document.head.appendChild(s);

// Builds the Babylon scene: a free camera with a CRT post-process attached,
// and a 16:9 plane ("TV") that plays an HLS video stream once hls.js loads.
// Returns the BABYLON.Scene. Relies on outer globals: engine, $, s (script tag), Hls.
const createScene = function () {

    engine.displayLoadingUI();
    const stream1 = "https://etlive-mediapackage-fastly.cbsaavideo.com/dvr/manifest.m3u8";
    // jQuery-wrapped <video> element appended to the document body.
    const video = $("<video autoplay playsinline src='"+stream1+"'></video>");
    $("body").append(video);
    console.log("Adding HTML video element");

    // This creates a basic Babylon Scene object (non-mesh)
    const scene = new BABYLON.Scene(engine);

    // This creates and positions a free camera (non-mesh)
    const camera = new BABYLON.FreeCamera("camera1", new BABYLON.Vector3(0, 0, 10), scene);

    // This targets the camera to scene origin
    camera.setTarget(BABYLON.Vector3.Zero());

    // This creates a light, aiming 0,1,0 - to the sky (non-mesh)
    const light = new BABYLON.HemisphericLight("light", new BABYLON.Vector3(0, 1, 0), scene);

    // Default intensity is 1. Let's dim the light a small amount
    light.intensity = 1;

    // Plane used as the "TV screen" that will receive the video texture.
    const TV = BABYLON.MeshBuilder.CreatePlane("screen", {width: 16, height: 9}, scene);

    // Register the CRT fragment shader under the "crt" key so the
    // PostProcess below can find it via its fragmentUrl.
    BABYLON.Effect.ShadersStore["crtFragmentShader"] = `
    #ifdef GL_ES
        precision highp float;
    #endif

    #define PI 3.1415926538

    // Samplers
    varying vec2 vUV;
    uniform sampler2D textureSampler;

    // Parameters
    uniform vec2 curvature;
    uniform vec2 screenResolution;
    uniform vec2 scanLineOpacity;

    vec2 curveRemapUV(vec2 uv)
    {
        // as we near the edge of our screen apply greater distortion using a sinusoid.

        uv = uv * 2.0 - 1.0;
        vec2 offset = abs(uv.yx) / vec2(curvature.x, curvature.y);
        uv = uv + uv * offset * offset;
        uv = uv * 0.5 + 0.5;
        return uv;
    }

    vec4 scanLineIntensity(float uv, float resolution, float opacity)
    {
        float intensity = sin(uv * resolution * PI * 2.0);
        intensity = ((0.5 * intensity) + 0.5) * 0.9 + 0.1;
        return vec4(vec3(pow(intensity, opacity)), 1.0);
    }

    void main(void) 
    {
        vec2 remappedUV = curveRemapUV(vec2(vUV.x, vUV.y));
        vec4 baseColor = texture2D(textureSampler, remappedUV);

        baseColor *= scanLineIntensity(remappedUV.x, screenResolution.y, scanLineOpacity.x);
        baseColor *= scanLineIntensity(remappedUV.y, screenResolution.x, scanLineOpacity.y);

        if (remappedUV.x < 0.0 || remappedUV.y < 0.0 || remappedUV.x > 1.0 || remappedUV.y > 1.0){
            gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);
        } else {
            gl_FragColor = baseColor;
        }
    }
    `;

    // Attach the CRT shader to the camera and feed its uniforms each frame.
    const postProcess = new BABYLON.PostProcess("CRTShaderPostProcess", "crt", ["curvature", "screenResolution", "scanLineOpacity"], null, 0.25, camera);
    postProcess.onApply = function (effect) {
        effect.setFloat2("curvature", 4.0, 4.0);
        effect.setFloat2("screenResolution", 720, 480);
        effect.setFloat2("scanLineOpacity", 0.2, 1);
    };

    TV.actionManager = new BABYLON.ActionManager(scene);

    // Once the hls.js <script> has loaded, wire the stream into a VideoTexture.
    s.onload = function() {
        // Video material
        // FIX: `videoMat` was previously assigned without any declaration,
        // creating an implicit global; declare it locally instead.
        const videoMat = new BABYLON.StandardMaterial("textVid", scene);
        // Renamed from `video` to avoid shadowing the outer jQuery wrapper;
        // this is the raw DOM <video> element.
        const videoEl = document.querySelector('video');
        const videoTexture = new BABYLON.VideoTexture('video', videoEl, scene, true, true);

        videoMat.backFaceCulling = false;
        videoMat.ambientTexture = videoTexture;
        videoMat.emissiveColor = BABYLON.Color3.White();
        videoMat.ambientTexture.wAng = Math.PI; // rotate texture 180° so the video is upright

        TV.material = videoMat;
        const htmlVideo = videoTexture.video;

        // Shared by both playback paths below: start the video when the
        // TV plane is picked. (Was duplicated verbatim in the original.)
        const registerPlayOnPick = function() {
            TV.actionManager.registerAction(
                new BABYLON.ExecuteCodeAction(BABYLON.ActionManager.OnPickTrigger,
                function(event) {
                    htmlVideo.play();
                })
            );
        };

        if (Hls.isSupported()) {
            // MSE path: hls.js demuxes the stream into the <video> element.
            const hls = new Hls();
            hls.loadSource(stream1);
            hls.attachMedia(videoEl);
            engine.hideLoadingUI();
            hls.on(Hls.Events.MANIFEST_PARSED, registerPlayOnPick);
        } else if (videoEl.canPlayType('application/vnd.apple.mpegurl')) {
            // Native HLS path (Safari): point the element straight at the manifest.
            videoEl.src = stream1;
            engine.hideLoadingUI();
            videoEl.addEventListener('loadedmetadata', registerPlayOnPick);
        }
    };

    return scene;
};
brianzinn commented 2 years ago

I think this should work — I haven't tested it; I am just writing it here in the issue. You need to get a reference to the camera and pass it into the <postProcess ../>, but assigning to a useRef doesn't trigger a re-render on its own. There are different ways around that: one is to mirror readiness into useState (shown below); there are others involving component structure/ordering.

import { useRef, useEffect } from 'react';

const YourScene = () => {
  const cameraRef = useRef(null);
  const [cameraReady, setCameraReady] = useState(false);
  useEffect(() => {
    setCameraReady(cameraRef.current !== null);
  }, [cameraRef.current]);

  return (
    <Engine antialias={true} adaptToDeviceRatio={true} canvasId="sample-canvas">
      <Scene>
        {/* get camera reference with "ref=.." */}
        <freeCamera name={'camera1'} ref={cameraRef} position={new Vector3(0, 0, 10)} target={Vector3.Zero()}/>
        {cameraReady &&
          <postProcess name={'CRTShaderPostProcess'} fragmentUrl={'crt'} parameters={["curvature", "screenResolution", "scanLineOpacity"]} samplers={[]} options={0.25} camera={cameraRef.current} />
         }
       </Scene>
  </Engine>
)}

The camera should be passed in as it is a constructor parameter (I checked the code), but the constructor parameters (with new) are only passed in and used the first time the postProcess element is rendered. I did check the reconciler and it looks like that will work.

It will be good for me to make a storybook with a working example - let me know if that works or otherwise I will try to make an example tonight.

brianzinn commented 2 years ago

closing for housekeeping purposes. please re-open if this is not solved for you.