// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Main driver for web viewer.
 */

/**
 * Panel for the current submodel.
 */
let gSubmodelPanel = null;
let gVMemPanel = null;

/**
 * Number of sample points per voxel.
 * @type {number}
 */
let gStepMult = 1;

/**
 * For large scenes with varying exposure we set this value to be the exposure
 * of the virtual camera (shutter_speed_in_seconds * iso / 1000).
 * @type {number}
 */
let gExposure = null;

/**
 * Loads the full scene representation.
 *
 * This includes all submodel assets, including allocation and download.
 *
 * This function should be called exactly once.
 *
 * @param {string} dirUrl Either points to a directory that contains scene
 *   files or to a json file that maps virtual filenames to download links.
 * @param {!object} overrideParams A dictionary that contains overrides for the
 *   params in scene_params.json (e.g. combineMode, deferredMode or useBits).
 */
function loadScene(dirUrl, overrideParams) {
  // Check if dirUrl points to a json file or to a directory.
  let filenameToLinkPromise;
  if (dirUrl && dirUrl.includes('.json')) {
    // If this is the case, we fetch a JSON file that maps filenames to links.
    filenameToLinkPromise = loadJSONFile(dirUrl);
  } else {
    // Otherwise, the scene files lie directly under dirUrl and we create a
    // dummy promise that resolves immediately.
    filenameToLinkPromise = Promise.resolve(null);
    console.log(
        'dirUrl does not point to a JSON file; scene files are loaded ' +
        'directly from this directory.');
  }

  filenameToLinkPromise
      .then(filenameToLink => {
        // Mapping from fake filenames to real filenames under the root
        // directory dirUrl.
        const router = new Router(dirUrl, filenameToLink);
        console.log('router:', router);

        // Loads scene parameters (voxel grid size, view-dependence MLP).
        const sceneParamsUrl = router.translate('scene_params.json');
        console.log('sceneParamsUrl:', sceneParamsUrl);
        const sceneParamsPromise = loadJSONFile(sceneParamsUrl);
        console.log('sceneParamsPromise:', sceneParamsPromise);

        if (overrideParams['loadBenchmarkCameras']) {
          loadBenchmarkCameras(router);
        }

        // Some of the shader code is stored in separate files.
        return Promise.all([sceneParamsPromise, {router, filenameToLink}]);
      })
      .then(parsed => {
        // scene_params.json for this scene. Translates filenames to full URLs.
        const [sceneParams, carry] = parsed;

        // Determine whether there are multiple submodels and, if so, how many.
        let initialSubmodelIndex = 0;
        gUseSubmodel = (sceneParams.hasOwnProperty('num_local_submodels') &&
                        sceneParams['num_local_submodels'] > 1);
        if (gUseSubmodel) {
          // Override the default submodel with the one the user chose via the
          // URL.
          gSubmodelCount = sceneParams['num_local_submodels'];
          initialSubmodelIndex =
              sceneParams['sm_to_params'][sceneParams['submodel_idx']];
        }

        // Get links to the scene_params.json file of each submodel.
        let sceneParamsPromises = [];
        for (let si = 0; si < gSubmodelCount; ++si) {
          // Get the submodel ids participating in this scene.
          const submodelId = sceneParams['params_to_sm'][si];
          // Find the path to its scene_params.json file.
          const filePath = carry.router.translate(
              submodelAssetPath(submodelId, 'scene_params.json'));
          // Load scene_params.json for this submodel.
          sceneParamsPromises.push(loadJSONFile(filePath));
        }

        // Wait for all scene_params.json files to be loaded.
        return Promise.all(
            [{...carry, initialSubmodelIndex}, ...sceneParamsPromises]);
      })
      .then(loaded => {
        let [carry, ...submodelSceneParams] = loaded;
        for (let si = 0; si < submodelSceneParams.length; ++si) {
          // Override the scene params using the URL GET variables.
          submodelSceneParams[si] =
              extend(submodelSceneParams[si], overrideParams);

          // Build a fake-filename-to-real-filename translator for this
          // submodel.
          const submodelId = submodelSceneParams[si]['params_to_sm'][si];
          let subDirUrl = dirUrl;
          if (gUseSubmodel) {
            subDirUrl = `${subDirUrl}/${submodelAssetPath(submodelId)}`;
          }
          let submodelRouter = new Router(subDirUrl, carry.filenameToLink);

          // Load all assets related to this submodel. This is not a blocking
          // operation.
          // TODO: Consider loading this content on-demand and using an LRU
          // cache to bound memory usage.
          let submodelContent =
              initializeSceneContent(submodelSceneParams[si], submodelRouter);
          console.log(`spec for submodel #${si}:`, submodelContent.spec);

          // Register submodel content with the texture manager.
          registerSubmodelContent(si, submodelContent);
        }

        // Now that we know the submodel scale we can set the camera pose.
        let si = carry.initialSubmodelIndex;
        setupInitialCameraPose(
            dirUrl,
            submodelCenter(si, getSubmodelContent(si).params),
        );

        // Instantiate scene & texture buffers.
        return Promise.all([si, initializeDeferredMlp(si)]);
      })
      .then(([si, _]) => {
        return initializePingPongBuffers(si);
      })
      .then(() => {
        return requestAnimationFrame(renderNextFrame);
      });
}

// Flag to enable/disable camera movement (e.g. while hovering over UI
// elements).
let disableCameraControls = false;

/**
 * Initializes the application based on the URL parameters.
 */
function initFromParameters() {
  // HTTP GET query parameters.
  const params = new URL(window.location.href).searchParams;
  console.log(params);

  // Base directory for all assets.
  const dirUrl = 'nyc/sm_004';
  console.log(dirUrl);

  // Controls platform-specific defaults: phone, low, medium, high. Not
  // const as benchmark=true can override it.
  let quality = params.get('quality');

  // Initialize lowResFactor with a default value.
  let lowResFactor = parseInt(params.get('downscale') || 1, 10);

  // Number of samples per voxel. Increase for slower rendering and fewer
  // artifacts.
  const stepMult = params.get('stepMult');
  if (stepMult) {
    gStepMult = parseInt(stepMult, 10);
  }
  const frameMult = params.get('frameMult');
  if (frameMult) {
    gFrameMult = parseInt(frameMult, 10);
  }

  // Manually specify exposure for exposure-aware models.
  const exposure = params.get('exposure');
  if (exposure) {
    gExposure = parseFloat(exposure);
  }

  // For manually overriding parameters in scene_params.json.
  let overrideParams = {};

  const benchmarkParam = params.get('benchmark');
  const benchmark = benchmarkParam &&
      (benchmarkParam.toLowerCase() === 'time' ||
       benchmarkParam.toLowerCase() === 'quality');
  if (benchmark) {
    overrideParams['loadBenchmarkCameras'] = true;
    quality = 'high';
    const sceneNameChunks = dirUrl.split('/').slice(-2);
    setupBenchmarkStats(
        sceneNameChunks[0] + '_' + sceneNameChunks[1],
        benchmarkParam.toLowerCase() === 'quality');
  }

  // snerg, vfr
  const deferredMode = params.get('deferredMode');
  if (deferredMode) {
    overrideParams['deferred_rendering_mode'] = deferredMode;
  }

  // sum, concat_and_sum
  const combineMode = params.get('combineMode');
  if (combineMode && combineMode === 'concat_and_sum') {
    overrideParams['merge_features_combine_op'] = 'coarse_sum';
  }

  // Are the occupancy grids bit-packed?
  const useBits = params.get('useBits');
  if (useBits) {
    overrideParams['useBits'] = useBits.toLowerCase() === 'true';
  }

  // Use a distance grid for calculating step sizes.
  const useDistanceGrid = params.get('useDistanceGrid');
  if (useDistanceGrid) {
    overrideParams['useDistanceGrid'] =
        useDistanceGrid.toLowerCase() === 'true';
  }

  // Load legacy scenes, where the distance & occupancy grids are stored
  // as a single monolithic file.
  const legacyGrids = params.get('legacyGrids');
  if (legacyGrids) {
    overrideParams['legacyGrids'] = legacyGrids.toLowerCase() === 'true';
  }

  // Sets the activation function of the DeferredMLP. Either "relu" or "elu".
  // Defaults to elu.
  const activation = params.get('activation');
  if (activation) {
    overrideParams['activation'] = activation;
  }

  // Whether to use feature gating for the triplanes. Either "true" or "false".
  // Defaults to true.
  const featureGating = params.get('featureGating');
  if (featureGating) {
    overrideParams['feature_gating'] = featureGating.toLowerCase() === 'true';
  }

  // Limit the number of cached submodel payloads.
  const submodelCacheSize = params.get('submodelCacheSize');
  if (submodelCacheSize) {
    gSubmodelCacheSize = Number(submodelCacheSize);
  }

  // Merge slices of assets together before binding to a WebGL texture.
  const mergeSlices = params.get('mergeSlices');
  if (mergeSlices) {
    overrideParams['merge_slices'] = mergeSlices == 'true';
  }

  // The background color (in hex, e.g. #FF0000 for red) that the scene is
  // rendered on top of. Defaults to medium grey.
  const backgroundColor = params.get('backgroundColor');
  if (backgroundColor) {
    overrideParams['backgroundColor'] = '#' + backgroundColor;
  }

  // Create the view container and add the class styled by style.css.
  const view = document.createElement('div');
  view.classList.add('view');

  // Add the view container to the #viewspacecontainer element before reading
  // its dimensions.
  const viewSpaceContainer = document.getElementById('viewspacecontainer');
  viewSpaceContainer.appendChild(view);

  // Now that the element is in the DOM, we can read the dimensions defined by
  // the CSS.
  function getCssDimensions(element) {
    const styles = getComputedStyle(element);
    const width = parseInt(styles.width, 10);
    const height = parseInt(styles.height, 10);
    return {width, height};
  }
  const {width: frameBufferWidth, height: frameBufferHeight} =
      getCssDimensions(view);

  // Re-apply the retrieved dimensions explicitly (optional).
  view.style.width = `${frameBufferWidth}px`;
  view.style.height = `${frameBufferHeight}px`;

  // Log dimensions for debugging.
  console.log('Width:', frameBufferWidth, 'Height:', frameBufferHeight);
  // Other application-specific defaults can be defined here.

  // Mouse mode: defaults to "orbit" if not set in the URL.
  const mouseMode = params.get('mouseMode') || 'orbit';

  let stepSizeVisibilityDelay = 0.99;
  // If no downscale factor is specified, estimate it from the quality setting.
  if (!params.get('downscale') && quality) {
    let maxPixelsPerFrame = frameBufferWidth * frameBufferHeight;
    if (quality == 'phone') {
      // For iPhones.
      maxPixelsPerFrame = 300 * 450;
      stepSizeVisibilityDelay = 0.8;
    } else if (quality == 'low') {
      // For laptops with integrated GPUs.
      maxPixelsPerFrame = 600 * 250;
      stepSizeVisibilityDelay = 0.8;
    } else if (quality == 'medium') {
      // For laptops with discrete GPUs.
      maxPixelsPerFrame = 1200 * 640;
      stepSizeVisibilityDelay = 0.95;
    }  // else assume quality is 'high' and render at full resolution.

    while (frameBufferWidth * frameBufferHeight / lowResFactor >
           maxPixelsPerFrame) {
      lowResFactor++;
    }
    console.log('Automatically chose a downscaling factor of ' + lowResFactor);
  }
  overrideParams['useLargerStepsWhenOccluded'] = false;
  overrideParams['step_size_visibility_delay'] = stepSizeVisibilityDelay;

  // Near plane distance in world coordinates.
  const nearPlane = parseFloat(params.get('near') || 0.01);

  // FOV along screen height. Specified in degrees.
  const vfovy = parseFloat(params.get('vfovy') || 40.0);

  const viewSpace = document.querySelector('.viewspace');
  viewSpace.textContent = '';
  viewSpace.appendChild(view);

  // Create the canvas and attach it to .view.
  let canvas = document.createElement('canvas');
  view.appendChild(canvas);

  // Copy the parent (.view) styles onto the canvas via JavaScript.
  canvas.style.width = '100%';
  canvas.style.height = '100%';
  canvas.style.border = getComputedStyle(view).border;
  canvas.style.borderRadius = getComputedStyle(view).borderRadius;
  canvas.style.boxSizing = getComputedStyle(view).boxSizing;

  // Add tool for visualizing framerate.
  gStats = Stats();
  gStats.dom.style.position = 'absolute';
  gStats.dom.style.display = 'none';  // Hide the stats overlay entirely.
  viewSpace.appendChild(gStats.dom);

  gSubmodelPanel = gStats.addPanel(new Stats.Panel('SM', '#0ff', '#002'));
  gSubmodelPanel.update(getActiveSubmodelIndex());

  gVMemPanel = gStats.addPanel(new Stats.Panel('MB VRAM', '#0ff', '#002'));
  gVMemPanel.update(0);

  // Show FPS; hide other panels.
  gStats.showPanel(0);

  // Set up a high performance WebGL context, making sure that anti-aliasing is
  // turned off.
  let gl = canvas.getContext('webgl2', {
    powerPreference: 'high-performance',
    alpha: false,
    stencil: true,
    precision: 'highp',
    depth: true,
    antialias: false,
    desynchronized: false,
    preserveDrawingBuffer:
        benchmarkParam && benchmarkParam.toLowerCase() === 'quality',
  });
  gl.enable(gl.DEPTH_TEST);  // Enable depth testing.
  gl.depthFunc(gl.LEQUAL);  // Specify how depth comparisons are performed.
  gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
  gl.viewport(0, 0, canvas.width, canvas.height);

  gRenderer = new THREE.WebGLRenderer({
    canvas: canvas,
    context: gl,
  });

  // Set up the normal scene used for rendering.
  gCamera = new THREE.PerspectiveCamera(
      vfovy,  // Vertical field of view.
      Math.trunc(view.offsetWidth / lowResFactor) /
          Math.trunc(view.offsetHeight / lowResFactor),  // Aspect ratio.
      nearPlane,  // Near clipping plane.
      // Far clipping plane, increased from 100.0 to 2000; adjust as needed
      // for the scene.
      2000);
  gCamera.updateProjectionMatrix();

  // Expose gCamera to the global scope.
  window.sceneCamera = gCamera;

  // Set up progressive rendering and the renderer size.
  setupProgressiveRendering(view, lowResFactor);
  gRenderer.autoClear = false;
  gRenderer.setSize(view.offsetWidth, view.offsetHeight);
  gRenderer.setClearColor(0x000000, 1);

  // Uses the default 'orbit' mode if none was specified.
  setupCameraControls(mouseMode, view);

  let width = Math.trunc(view.offsetWidth / lowResFactor);
  let height = Math.trunc(view.offsetHeight / lowResFactor);
  setupViewport(width, height);

  loadScene(dirUrl, overrideParams);
}

let sphereAdded = false;

/**
 * The main update function that gets called every frame.
 *
 * @param {number} t elapsed time between frames (ms).
 */
function renderNextFrame(t) {
  // Delete old submodels to keep memory usage in check.
  garbageCollectSubmodelPayloads();

  // Attempt to set the current ray march scene. This will kick off the process
  // of instantiating a new scene if necessary.
  let submodelIndex =
      positionToSubmodel(gCamera.position, getActiveSubmodelContent().params);
  setCurrentRayMarchScene(submodelIndex);

  // setCurrentRayMarchScene() may not actually change the scene. Use the
  // index of the current active submodel instead.
  submodelIndex = getActiveSubmodelIndex();
  let sceneParams = getSubmodelContent(submodelIndex).params;

  for (let i = 0; i < gFrameMult; ++i) {
    gSubmodelTransform = submodelTransform(submodelIndex, sceneParams);

    gSubmodelPanel.update(submodelIndex);
    gVMemPanel.update(getCurrentTextureUsageInBytes() / 1e6);

    // Skip camera updates while the controls are disabled (e.g. while the
    // mouse hovers over interactive UI elements).
    if (!disableCameraControls) {
      updateCameraControls();
    }

    // For benchmarking, we want to directly set the projection matrix.
    if (!gBenchmark) {
      gCamera.updateProjectionMatrix();
    }
    gCamera.updateMatrixWorld();

    const currentSubmodelCenter = submodelCenter(submodelIndex, sceneParams);
    const submodelScale = getSubmodelScale(submodelIndex);
    let submodelCameraPosition = new THREE.Vector3().copy(gCamera.position);
    submodelCameraPosition.sub(currentSubmodelCenter);
    submodelCameraPosition.multiplyScalar(submodelScale);

    let shaderUniforms = getRayMarchScene().children[0].material.uniforms;

    // Make sure to free up GPU memory from the previous frames.
    if (!!shaderUniforms['weightsZero']['value']) {
      shaderUniforms['weightsZero']['value'].dispose();
    }
    if (!!shaderUniforms['weightsOne']['value']) {
      shaderUniforms['weightsOne']['value'].dispose();
    }
    if (!!shaderUniforms['weightsTwo']['value']) {
      shaderUniforms['weightsTwo']['value'].dispose();
    }

    shaderUniforms['bias_0']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 0, submodelCameraPosition);
    shaderUniforms['bias_1']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 1, submodelCameraPosition);
    shaderUniforms['bias_2']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 2, submodelCameraPosition);

    shaderUniforms['weightsZero']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 0, submodelCameraPosition);
    shaderUniforms['weightsOne']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 1, submodelCameraPosition);
    shaderUniforms['weightsTwo']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 2, submodelCameraPosition);

    gRenderer.clear();
    renderProgressively();

    // Render the sphere scene on top of the ray-marched result.
    gRenderer.render(gSphereScene, gCamera);
  }
  gStats.update();

  // By default we schedule the next frame ASAP, but the benchmark mode can
  // override this by replacing this lambda.
  let scheduleNextFrame = () => {
    requestAnimationFrame(renderNextFrame);
  };
  if (gBenchmark) {
    scheduleNextFrame = benchmarkPerformance(scheduleNextFrame);
  }
  scheduleNextFrame();
}

/**
 * Starts the volumetric scene viewer application.
 */
function start() {
  initFromParameters();
  addHandlers();

  // gCamera and gRenderer must have been initialized before they are used.
  if (!gCamera || !gRenderer) {
    console.error('gCamera or gRenderer is not initialized.');
    return;
  }

  // Now that gCamera and gRenderer are initialized, set up the interaction
  // plane, its gizmo, and mouse hover handling.
  addInteractionPlane();
  setupNewPlaneGizmo(newPlane);
  setupMouseHover();

  // Add event listeners for mouse movement and clicks.
  window.addEventListener('mousemove', onMouseMove, false);
  window.addEventListener('mousedown', onMouseDown, false);
  window.addEventListener('mouseup', onMouseUp, false);
}

window.onload = start;
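
/**
 * Illustrative only: a minimal sketch of how loadScene() can be driven with an
 * explicit overrides dictionary instead of URL GET parameters. The chosen
 * override values below are assumptions for demonstration; the real entry
 * point remains start() via window.onload, and this helper is never called by
 * the viewer itself.
 */
function loadSceneExample() {
  const exampleOverrides = {
    // The same keys that initFromParameters() derives from the query string.
    'useBits': true,
    'deferred_rendering_mode': 'snerg',
    'useLargerStepsWhenOccluded': false,
    'step_size_visibility_delay': 0.95,
  };
  // 'nyc/sm_004' matches the hard-coded asset directory used above.
  loadScene('nyc/sm_004', exampleOverrides);
}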