8th Wall Integration with Next.js

Hi team, I am getting this error while integrating 8th Wall with Next.js:

TypeError: Cannot read properties of null (reading 'canvas')

Hi Neha, what behavior do you see when testing on a mobile device?
Can you please provide relevant code snippets so we can help debug?

I am testing on localhost; it says link not found.

It’s possible that XR8 is not done loading by the time tryInitializeAR is executed. Instead of using setTimeout to call tryInitializeAR, try using the xrloaded event:

const onxrloaded = () => {
  // add camera pipeline modules
  // XR8.run()
}

window.XR8 ? onxrloaded() : window.addEventListener('xrloaded', onxrloaded)

Any time you reference XR8 in your code, it’s good practice to use this syntax to ensure the library is fully loaded.

Hi Evan,
Thanks for the reply. I am still facing the same issue.

You might want to try inserting the canvas via JavaScript to ensure it’s in the DOM at the time XR8.run() is called.

const camerafeedHtml = '<canvas id="camerafeed"></canvas>'  // markup for the camera feed canvas
document.body.insertAdjacentHTML('beforeend', camerafeedHtml)
const canvas = document.getElementById('camerafeed')

XR8.run({canvas})

Hi Evan,
Thank you! It is working now, but I ran into another issue: I am not able to see the glTF object. The box helper and other 3D elements like the box geometry load fine, and the glTF itself loads, but it is not visible.
Thanks

Are there any warnings or errors in the console? Are you loading the GLTFLoader in head.html? Does the model look as expected in the glTF viewer?

'use client'
import React, { useEffect, useRef } from 'react';
import * as THREE from 'three';
import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader';


const CubeScene = () => {
  const canvasRef = useRef(null);

  useEffect(() => {
    window.THREE = THREE;
    let xr8Cleanup = () => {};
    const clock = new THREE.Clock();
    let mixer; // assigned after the glTF loads
    const load8thWall = () => {
      const onxrloaded = () => {
        XR8.addCameraPipelineModules([ // Add XR8 WebGL pipeline modules.
          XR8.Threejs.pipelineModule(), // Enables Three.js.
          // Add more 8th Wall modules here as needed.
          
        ]);

        XR8.XrController.configure({
          canvas: canvasRef.current,
        });
        // function integrate8thWall() {
        //     XR8.Threejs.xrScene().then(({ scene, camera, renderer }) => {
        //         // Use the XR scene, camera, and renderer for Three.js
        //         window.XRExtras.Threejs.attachCamera(renderer, scene, camera);
        //         animate();
        //     });
        // }

        // Three.js scene setup:
        const scene = new THREE.Scene();
        const camera = new THREE.PerspectiveCamera(25, window.innerWidth / window.innerHeight, 1, 20000);

        const ambientLight = new THREE.AmbientLight(0xffffff, 0.5);
        scene.add(ambientLight);

        const directionalLight = new THREE.DirectionalLight(0xffffff, 1);
        directionalLight.position.set(0, 5, 5);
        scene.add(directionalLight);

        const renderer = new THREE.WebGLRenderer({
          canvas: canvasRef.current,
          alpha: true,
        });
        renderer.setSize(window.innerWidth, window.innerHeight);
        
        document.body.appendChild(renderer.domElement);
        renderer.outputEncoding = THREE.sRGBEncoding;

        // Add a cube to the scene:
        const geometry = new THREE.BoxGeometry();
        const material = new THREE.MeshBasicMaterial({ color: 0x00ff00 });
        const cube = new THREE.Mesh(geometry, material);
        cube.position.x = 0;
        cube.position.y = 0;
        cube.position.z = 0;
        scene.add(cube);
        const geo = new THREE.PlaneGeometry();
        const material1 = new THREE.MeshBasicMaterial({ color: 0x000011 });
        const plane = new THREE.Mesh(geo, material1);
        plane.position.x = 2;
        scene.add(plane);

        const loader = new GLTFLoader();
        console.log(loader);
        loader.load("/models/MAT29.gltf", function (gltf) {
          const box = new THREE.BoxHelper(gltf.scene, 0xffff00);
          scene.add(box);
          gltf.scene.scale.set(10, 10, 10); // Scale the model down
          gltf.scene.position.set(0, 0, 0);
          console.log(gltf);
          gltf.scene.traverse(function (child) {
            if (child.isMesh) {
              child.material = new THREE.MeshBasicMaterial({ wireframe: true });
              child.material.depthTest = false;
            }
          });
          mixer = new THREE.AnimationMixer(gltf.scene);
          mixer.clipAction(gltf.animations[0]).play();
          // XR8.Scene.GLTFLoader("models/MAT29")
          // gltf.scene.scale.set(2, 2, 2); // Scale your model to the desired size
          // Position your model or make any adjustments as needed
        }, undefined, function (error) {
          console.error(error);
        });

        camera.position.set(1, 1, 20);

    //     const animate = () => {
    //       requestAnimationFrame(animate);

    //       cube.rotation.x += 0.01;
    //       cube.rotation.y += 0.01;
    //       plane.rotation.x += 0.01;
    //       plane.rotation.y += 0.01;

    //       renderer.render(scene, camera);
    //     };

    //     animate();
    //   };
        const animate = () => {
          requestAnimationFrame(animate);

          const delta = clock.getDelta();
          if (mixer) mixer.update(delta); // advance the glTF animation

          renderer.render(scene, camera);
        };

        animate();
      };

      // Run onxrloaded immediately if XR8 has already loaded; otherwise wait for the xrloaded event.
      if (window.XR8) {
        onxrloaded();
      } else {
        window.addEventListener('xrloaded', onxrloaded);
        xr8Cleanup = () => window.removeEventListener('xrloaded', onxrloaded);
      }
    };

    if (typeof window !== 'undefined') {
      const script = document.createElement('script');
      script.src = 'https://apps.8thwall.com/xrweb?appKey=appkey';
      script.async = true;
      script.onload = load8thWall;
      document.body.appendChild(script);
    }

    return () => {
      xr8Cleanup();
    };
  }, []);

  return <canvas ref={canvasRef}></canvas>;
};

export default CubeScene;



The model works fine in the glTF viewer, and I can see in the console that the model is loaded. Attached is a screenshot.

Perhaps the model is too large or too small to be visible. The comment here says this line is supposed to scale the model down, but in fact it’s increasing its size by 10x. To reduce the scale you need a value between 0 and 1.
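For example, to make it a tenth of its original size instead (using the same gltf.scene from your loader callback):

gltf.scene.scale.set(0.1, 0.1, 0.1)  // values between 0 and 1 shrink the model
// gltf.scene.scale.set(10, 10, 10)  // values above 1 enlarge it (what the current code does)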

Hi Evan, I have tried that as well; it didn’t work, so I scaled it up instead.
Thank you

If you open up the model in Blender, is it immediately visible without zooming in or out? Feel free to share the model if you’re able.

No Evan, surprisingly I don’t see the model in Blender, but it works in the 8th Wall editor and not in my self-hosted application. Any solution? I also just tested with another model that renders properly in Blender, but it still doesn’t render in the self-hosted 8th Wall app.

What does the model look like in the 8th Wall editor?

Attaching a screenshot of the 8th Wall simulator.

Is this the exact same code as your self-hosted project, just in the cloud editor? The scene looks a bit different than in the earlier screenshot.

If you’re using three.js, be sure to follow the camera pipeline module pattern as demonstrated in our sample projects: three.js World Effects Template | 8th Wall

i.e., create an initScenePipelineModule and include it in your call to XR8.addCameraPipelineModules. In the earlier code snippet, it looks like you’re trying to set up the three.js scene outside of a camera pipeline module; a rough sketch of the pattern is below.
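This is only a sketch following the World Effects template (names like initXrScene and 'init-scene' are illustrative; adapt them to your project):

const initScenePipelineModule = () => {
  // Build scene content using the scene/camera that XR8.Threejs.pipelineModule() provides.
  const initXrScene = ({scene, camera}) => {
    scene.add(new THREE.AmbientLight(0xffffff, 0.5))
    // ...add your cube, plane, and glTF model here...
    camera.position.set(0, 3, 0)
  }

  return {
    name: 'init-scene',
    onStart: () => {
      const {scene, camera} = XR8.Threejs.xrScene()  // the three.js scene created by XR8.Threejs.pipelineModule()
      initXrScene({scene, camera})

      // Sync the three.js camera with the AR camera's starting pose.
      XR8.XrController.updateCameraProjectionMatrix({
        origin: camera.position,
        facing: camera.quaternion,
      })
    },
  }
}

const onxrloaded = () => {
  XR8.addCameraPipelineModules([
    XR8.GlTextureRenderer.pipelineModule(),  // draws the camera feed
    XR8.Threejs.pipelineModule(),            // creates the three.js scene, camera, and renderer
    XR8.XrController.pipelineModule(),       // enables 6DoF camera motion
    initScenePipelineModule(),               // your scene content
  ])
  XR8.run({canvas: canvasRef.current})       // the canvas from your ref
}

With this pattern you don’t create your own WebGLRenderer or requestAnimationFrame loop; XR8.Threejs.pipelineModule() creates the renderer and renders the scene every frame.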

The cloud editor code is different; the self-hosted code is the snippet I provided. Sure, let me follow that pattern. Thanks!