// Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* * Extracted from pdf.js * https://github.com/andreasgal/pdf.js * * Copyright (c) 2011 Mozilla Foundation * * Contributors: Andreas Gal * Chris G Jones * Shaon Barman * Vivien Nicolas <21@vingtetun.org> * Justin D'Arcangelo * Yury Delendik * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. * * This implementation slightly deviates from that of * https://unpkg.com/png-js@1.0.0/zlib.js in that references to "document" * and "window" have been removed. * */ let DecodeStream = (function() { function constructor() { this.pos = 0; this.bufferLength = 0; this.eof = false; this.buffer = null; } constructor.prototype = { ensureBuffer: function decodestream_ensureBuffer(requested) { var buffer = this.buffer; var current = buffer ? 
buffer.byteLength : 0; if (requested < current) return buffer; var size = 512; while (size < requested) size <<= 1; var buffer2 = new Uint8Array(size); for (var i = 0; i < current; ++i) buffer2[i] = buffer[i]; return this.buffer = buffer2; }, getByte: function decodestream_getByte() { var pos = this.pos; while (this.bufferLength <= pos) { if (this.eof) return null; this.readBlock(); } return this.buffer[this.pos++]; }, getBytes: function decodestream_getBytes(length) { var pos = this.pos; if (length) { this.ensureBuffer(pos + length); var end = pos + length; while (!this.eof && this.bufferLength < end) this.readBlock(); var bufEnd = this.bufferLength; if (end > bufEnd) end = bufEnd; } else { while (!this.eof) this.readBlock(); var end = this.bufferLength; } this.pos = end; return this.buffer.subarray(pos, end); }, lookChar: function decodestream_lookChar() { var pos = this.pos; while (this.bufferLength <= pos) { if (this.eof) return null; this.readBlock(); } return String.fromCharCode(this.buffer[this.pos]); }, getChar: function decodestream_getChar() { var pos = this.pos; while (this.bufferLength <= pos) { if (this.eof) return null; this.readBlock(); } return String.fromCharCode(this.buffer[this.pos++]); }, makeSubStream: function decodestream_makeSubstream(start, length, dict) { var end = start + length; while (this.bufferLength <= end && !this.eof) this.readBlock(); return new Stream(this.buffer, start, length, dict); }, skip: function decodestream_skip(n) { if (!n) n = 1; this.pos += n; }, reset: function decodestream_reset() { this.pos = 0; } }; return constructor; })(); let FlateStream = (function() { var codeLenCodeMap = new Uint32Array([ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]); var lengthDecode = new Uint32Array([ 0x00003, 0x00004, 0x00005, 0x00006, 0x00007, 0x00008, 0x00009, 0x0000a, 0x1000b, 0x1000d, 0x1000f, 0x10011, 0x20013, 0x20017, 0x2001b, 0x2001f, 0x30023, 0x3002b, 0x30033, 0x3003b, 0x40043, 0x40053, 0x40063, 0x40073, 0x50083, 0x500a3, 0x500c3, 0x500e3, 0x00102, 0x00102, 0x00102 ]); var distDecode = new Uint32Array([ 0x00001, 0x00002, 0x00003, 0x00004, 0x10005, 0x10007, 0x20009, 0x2000d, 0x30011, 0x30019, 0x40021, 0x40031, 0x50041, 0x50061, 0x60081, 0x600c1, 0x70101, 0x70181, 0x80201, 0x80301, 0x90401, 0x90601, 0xa0801, 0xa0c01, 0xb1001, 0xb1801, 0xc2001, 0xc3001, 0xd4001, 0xd6001 ]); var fixedLitCodeTab = [new Uint32Array([ 0x70100, 0x80050, 0x80010, 0x80118, 0x70110, 0x80070, 0x80030, 0x900c0, 0x70108, 0x80060, 0x80020, 0x900a0, 0x80000, 0x80080, 0x80040, 0x900e0, 0x70104, 0x80058, 0x80018, 0x90090, 0x70114, 0x80078, 0x80038, 0x900d0, 0x7010c, 0x80068, 0x80028, 0x900b0, 0x80008, 0x80088, 0x80048, 0x900f0, 0x70102, 0x80054, 0x80014, 0x8011c, 0x70112, 0x80074, 0x80034, 0x900c8, 0x7010a, 0x80064, 0x80024, 0x900a8, 0x80004, 0x80084, 0x80044, 0x900e8, 0x70106, 0x8005c, 0x8001c, 0x90098, 0x70116, 0x8007c, 0x8003c, 0x900d8, 0x7010e, 0x8006c, 0x8002c, 0x900b8, 0x8000c, 0x8008c, 0x8004c, 0x900f8, 0x70101, 0x80052, 0x80012, 0x8011a, 0x70111, 0x80072, 0x80032, 0x900c4, 0x70109, 0x80062, 0x80022, 0x900a4, 0x80002, 0x80082, 0x80042, 0x900e4, 0x70105, 0x8005a, 0x8001a, 0x90094, 0x70115, 0x8007a, 0x8003a, 0x900d4, 0x7010d, 0x8006a, 0x8002a, 0x900b4, 0x8000a, 0x8008a, 0x8004a, 0x900f4, 0x70103, 0x80056, 0x80016, 0x8011e, 0x70113, 0x80076, 0x80036, 0x900cc, 0x7010b, 0x80066, 0x80026, 0x900ac, 0x80006, 0x80086, 0x80046, 0x900ec, 0x70107, 0x8005e, 0x8001e, 0x9009c, 0x70117, 0x8007e, 0x8003e, 0x900dc, 0x7010f, 0x8006e, 0x8002e, 0x900bc, 0x8000e, 0x8008e, 0x8004e, 
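// (Each packed entry in this fixed literal/length table is
// (codeLength << 16) | symbolValue; getCode() below unpacks it as
// codeLen = code >> 16 and codeVal = code & 0xffff.)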
0x900fc, 0x70100, 0x80051, 0x80011, 0x80119, 0x70110, 0x80071, 0x80031, 0x900c2, 0x70108, 0x80061, 0x80021, 0x900a2, 0x80001, 0x80081, 0x80041, 0x900e2, 0x70104, 0x80059, 0x80019, 0x90092, 0x70114, 0x80079, 0x80039, 0x900d2, 0x7010c, 0x80069, 0x80029, 0x900b2, 0x80009, 0x80089, 0x80049, 0x900f2, 0x70102, 0x80055, 0x80015, 0x8011d, 0x70112, 0x80075, 0x80035, 0x900ca, 0x7010a, 0x80065, 0x80025, 0x900aa, 0x80005, 0x80085, 0x80045, 0x900ea, 0x70106, 0x8005d, 0x8001d, 0x9009a, 0x70116, 0x8007d, 0x8003d, 0x900da, 0x7010e, 0x8006d, 0x8002d, 0x900ba, 0x8000d, 0x8008d, 0x8004d, 0x900fa, 0x70101, 0x80053, 0x80013, 0x8011b, 0x70111, 0x80073, 0x80033, 0x900c6, 0x70109, 0x80063, 0x80023, 0x900a6, 0x80003, 0x80083, 0x80043, 0x900e6, 0x70105, 0x8005b, 0x8001b, 0x90096, 0x70115, 0x8007b, 0x8003b, 0x900d6, 0x7010d, 0x8006b, 0x8002b, 0x900b6, 0x8000b, 0x8008b, 0x8004b, 0x900f6, 0x70103, 0x80057, 0x80017, 0x8011f, 0x70113, 0x80077, 0x80037, 0x900ce, 0x7010b, 0x80067, 0x80027, 0x900ae, 0x80007, 0x80087, 0x80047, 0x900ee, 0x70107, 0x8005f, 0x8001f, 0x9009e, 0x70117, 0x8007f, 0x8003f, 0x900de, 0x7010f, 0x8006f, 0x8002f, 0x900be, 0x8000f, 0x8008f, 0x8004f, 0x900fe, 0x70100, 0x80050, 0x80010, 0x80118, 0x70110, 0x80070, 0x80030, 0x900c1, 0x70108, 0x80060, 0x80020, 0x900a1, 0x80000, 0x80080, 0x80040, 0x900e1, 0x70104, 0x80058, 0x80018, 0x90091, 0x70114, 0x80078, 0x80038, 0x900d1, 0x7010c, 0x80068, 0x80028, 0x900b1, 0x80008, 0x80088, 0x80048, 0x900f1, 0x70102, 0x80054, 0x80014, 0x8011c, 0x70112, 0x80074, 0x80034, 0x900c9, 0x7010a, 0x80064, 0x80024, 0x900a9, 0x80004, 0x80084, 0x80044, 0x900e9, 0x70106, 0x8005c, 0x8001c, 0x90099, 0x70116, 0x8007c, 0x8003c, 0x900d9, 0x7010e, 0x8006c, 0x8002c, 0x900b9, 0x8000c, 0x8008c, 0x8004c, 0x900f9, 0x70101, 0x80052, 0x80012, 0x8011a, 0x70111, 0x80072, 0x80032, 0x900c5, 0x70109, 0x80062, 0x80022, 0x900a5, 0x80002, 0x80082, 0x80042, 0x900e5, 0x70105, 0x8005a, 0x8001a, 0x90095, 0x70115, 0x8007a, 0x8003a, 0x900d5, 0x7010d, 0x8006a, 0x8002a, 0x900b5, 0x8000a, 0x8008a, 0x8004a, 0x900f5, 0x70103, 0x80056, 0x80016, 0x8011e, 0x70113, 0x80076, 0x80036, 0x900cd, 0x7010b, 0x80066, 0x80026, 0x900ad, 0x80006, 0x80086, 0x80046, 0x900ed, 0x70107, 0x8005e, 0x8001e, 0x9009d, 0x70117, 0x8007e, 0x8003e, 0x900dd, 0x7010f, 0x8006e, 0x8002e, 0x900bd, 0x8000e, 0x8008e, 0x8004e, 0x900fd, 0x70100, 0x80051, 0x80011, 0x80119, 0x70110, 0x80071, 0x80031, 0x900c3, 0x70108, 0x80061, 0x80021, 0x900a3, 0x80001, 0x80081, 0x80041, 0x900e3, 0x70104, 0x80059, 0x80019, 0x90093, 0x70114, 0x80079, 0x80039, 0x900d3, 0x7010c, 0x80069, 0x80029, 0x900b3, 0x80009, 0x80089, 0x80049, 0x900f3, 0x70102, 0x80055, 0x80015, 0x8011d, 0x70112, 0x80075, 0x80035, 0x900cb, 0x7010a, 0x80065, 0x80025, 0x900ab, 0x80005, 0x80085, 0x80045, 0x900eb, 0x70106, 0x8005d, 0x8001d, 0x9009b, 0x70116, 0x8007d, 0x8003d, 0x900db, 0x7010e, 0x8006d, 0x8002d, 0x900bb, 0x8000d, 0x8008d, 0x8004d, 0x900fb, 0x70101, 0x80053, 0x80013, 0x8011b, 0x70111, 0x80073, 0x80033, 0x900c7, 0x70109, 0x80063, 0x80023, 0x900a7, 0x80003, 0x80083, 0x80043, 0x900e7, 0x70105, 0x8005b, 0x8001b, 0x90097, 0x70115, 0x8007b, 0x8003b, 0x900d7, 0x7010d, 0x8006b, 0x8002b, 0x900b7, 0x8000b, 0x8008b, 0x8004b, 0x900f7, 0x70103, 0x80057, 0x80017, 0x8011f, 0x70113, 0x80077, 0x80037, 0x900cf, 0x7010b, 0x80067, 0x80027, 0x900af, 0x80007, 0x80087, 0x80047, 0x900ef, 0x70107, 0x8005f, 0x8001f, 0x9009f, 0x70117, 0x8007f, 0x8003f, 0x900df, 0x7010f, 0x8006f, 0x8002f, 0x900bf, 0x8000f, 0x8008f, 0x8004f, 0x900ff ]), 9]; var fixedDistCodeTab = [new Uint32Array([ 0x50000, 0x50010, 0x50008, 0x50018, 
0x50004, 0x50014, 0x5000c, 0x5001c, 0x50002, 0x50012, 0x5000a, 0x5001a, 0x50006, 0x50016, 0x5000e, 0x00000, 0x50001, 0x50011, 0x50009, 0x50019, 0x50005, 0x50015, 0x5000d, 0x5001d, 0x50003, 0x50013, 0x5000b, 0x5001b, 0x50007, 0x50017, 0x5000f, 0x00000 ]), 5]; function error(e) { throw new Error(e) } function constructor(bytes) { //var bytes = stream.getBytes(); var bytesPos = 0; var cmf = bytes[bytesPos++]; var flg = bytes[bytesPos++]; if (cmf == -1 || flg == -1) error('Invalid header in flate stream'); if ((cmf & 0x0f) != 0x08) error('Unknown compression method in flate stream'); if ((((cmf << 8) + flg) % 31) != 0) error('Bad FCHECK in flate stream'); if (flg & 0x20) error('FDICT bit set in flate stream'); this.bytes = bytes; this.bytesPos = bytesPos; this.codeSize = 0; this.codeBuf = 0; DecodeStream.call(this); } constructor.prototype = Object.create(DecodeStream.prototype); constructor.prototype.getBits = function(bits) { var codeSize = this.codeSize; var codeBuf = this.codeBuf; var bytes = this.bytes; var bytesPos = this.bytesPos; var b; while (codeSize < bits) { if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad encoding in flate stream'); codeBuf |= b << codeSize; codeSize += 8; } b = codeBuf & ((1 << bits) - 1); this.codeBuf = codeBuf >> bits; this.codeSize = codeSize -= bits; this.bytesPos = bytesPos; return b; }; constructor.prototype.getCode = function(table) { var codes = table[0]; var maxLen = table[1]; var codeSize = this.codeSize; var codeBuf = this.codeBuf; var bytes = this.bytes; var bytesPos = this.bytesPos; while (codeSize < maxLen) { var b; if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad encoding in flate stream'); codeBuf |= (b << codeSize); codeSize += 8; } var code = codes[codeBuf & ((1 << maxLen) - 1)]; var codeLen = code >> 16; var codeVal = code & 0xffff; if (codeSize == 0 || codeSize < codeLen || codeLen == 0) error('Bad encoding in flate stream'); this.codeBuf = (codeBuf >> codeLen); this.codeSize = (codeSize - codeLen); this.bytesPos = bytesPos; return codeVal; }; constructor.prototype.generateHuffmanTable = function(lengths) { var n = lengths.length; // find max code length var maxLen = 0; for (var i = 0; i < n; ++i) { if (lengths[i] > maxLen) maxLen = lengths[i]; } // build the table var size = 1 << maxLen; var codes = new Uint32Array(size); for (var len = 1, code = 0, skip = 2; len <= maxLen; ++len, code <<= 1, skip <<= 1) { for (var val = 0; val < n; ++val) { if (lengths[val] == len) { // bit-reverse the code var code2 = 0; var t = code; for (var i = 0; i < len; ++i) { code2 = (code2 << 1) | (t & 1); t >>= 1; } // fill the table entries for (var i = code2; i < size; i += skip) codes[i] = (len << 16) | val; ++code; } } } return [codes, maxLen]; }; constructor.prototype.readBlock = function() { function repeat(stream, array, len, offset, what) { var repeat = stream.getBits(len) + offset; while (repeat-- > 0) array[i++] = what; } // read block header var hdr = this.getBits(3); if (hdr & 1) this.eof = true; hdr >>= 1; if (hdr == 0) { // uncompressed block var bytes = this.bytes; var bytesPos = this.bytesPos; var b; if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad block header in flate stream'); var blockLen = b; if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad block header in flate stream'); blockLen |= (b << 8); if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad block header in flate stream'); var check = b; if (typeof (b = bytes[bytesPos++]) == 'undefined') error('Bad block header in flate stream'); 
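// A stored (uncompressed) block starts with LEN, the little-endian block
// length, followed by NLEN, its one's complement (RFC 1951, section 3.2.4);
// the check below verifies that NLEN really is ~LEN before copying raw bytes.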
check |= (b << 8); if (check != (~blockLen & 0xffff)) error('Bad uncompressed block length in flate stream'); this.codeBuf = 0; this.codeSize = 0; var bufferLength = this.bufferLength; var buffer = this.ensureBuffer(bufferLength + blockLen); var end = bufferLength + blockLen; this.bufferLength = end; for (var n = bufferLength; n < end; ++n) { if (typeof (b = bytes[bytesPos++]) == 'undefined') { this.eof = true; break; } buffer[n] = b; } this.bytesPos = bytesPos; return; } var litCodeTable; var distCodeTable; if (hdr == 1) { // compressed block, fixed codes litCodeTable = fixedLitCodeTab; distCodeTable = fixedDistCodeTab; } else if (hdr == 2) { // compressed block, dynamic codes var numLitCodes = this.getBits(5) + 257; var numDistCodes = this.getBits(5) + 1; var numCodeLenCodes = this.getBits(4) + 4; // build the code lengths code table var codeLenCodeLengths = Array(codeLenCodeMap.length); var i = 0; while (i < numCodeLenCodes) codeLenCodeLengths[codeLenCodeMap[i++]] = this.getBits(3); var codeLenCodeTab = this.generateHuffmanTable(codeLenCodeLengths); // build the literal and distance code tables var len = 0; var i = 0; var codes = numLitCodes + numDistCodes; var codeLengths = new Array(codes); while (i < codes) { var code = this.getCode(codeLenCodeTab); if (code == 16) { repeat(this, codeLengths, 2, 3, len); } else if (code == 17) { repeat(this, codeLengths, 3, 3, len = 0); } else if (code == 18) { repeat(this, codeLengths, 7, 11, len = 0); } else { codeLengths[i++] = len = code; } } litCodeTable = this.generateHuffmanTable(codeLengths.slice(0, numLitCodes)); distCodeTable = this.generateHuffmanTable(codeLengths.slice(numLitCodes, codes)); } else { error('Unknown block type in flate stream'); } var buffer = this.buffer; var limit = buffer ? buffer.length : 0; var pos = this.bufferLength; while (true) { var code1 = this.getCode(litCodeTable); if (code1 < 256) { if (pos + 1 >= limit) { buffer = this.ensureBuffer(pos + 1); limit = buffer.length; } buffer[pos++] = code1; continue; } if (code1 == 256) { this.bufferLength = pos; return; } code1 -= 257; code1 = lengthDecode[code1]; var code2 = code1 >> 16; if (code2 > 0) code2 = this.getBits(code2); var len = (code1 & 0xffff) + code2; code1 = this.getCode(distCodeTable); code1 = distDecode[code1]; code2 = code1 >> 16; if (code2 > 0) code2 = this.getBits(code2); var dist = (code1 & 0xffff) + code2; if (pos + len >= limit) { buffer = this.ensureBuffer(pos + len); limit = buffer.length; } for (var k = 0; k < len; ++k, ++pos) buffer[pos] = buffer[pos - dist]; } }; return constructor; })(); // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
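// Usage sketch for the FlateStream class defined above (illustrative only;
// `zlibBytes` is a hypothetical zlib-wrapped Uint8Array, e.g. the
// concatenated contents of a PNG's IDAT chunks). getBytes() with no length
// argument reads blocks until EOF and returns the whole decompressed buffer,
// which is how PNG.decodePixels below consumes it.
function exampleInflate(zlibBytes) {
  const stream = new FlateStream(zlibBytes);
  return stream.getBytes();  // Uint8Array of decompressed bytes.
}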
/* * MIT LICENSE * Copyright (c) 2011 Devon Govett * * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the "Software"), to deal in the Software * without restriction, including without limitation the rights to use, copy, modify, merge, * publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons * to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or * substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING * BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * This implementation slightly deviates from that of * https://unpkg.com/png-js@1.0.0/png.js in that references to "document" * and "window" have been removed. * */ let PNG = (function() { let APNG_DISPOSE_OP_NONE = 0; let APNG_DISPOSE_OP_BACKGROUND = 1; let APNG_DISPOSE_OP_PREVIOUS = 2; let APNG_BLEND_OP_SOURCE = 0; let APNG_BLEND_OP_OVER = 1; class PNG { static load(url, canvas, callback) { if (typeof canvas === 'function') { callback = canvas; } const xhr = new XMLHttpRequest(); xhr.open('GET', url, true); xhr.responseType = 'arraybuffer'; xhr.onload = () => { const data = new Uint8Array(xhr.response || xhr.mozResponseArrayBuffer); const png = new PNG(data); if (typeof (canvas && canvas.getContext) === 'function') { png.render(canvas); } return typeof callback === 'function' ? 
callback(png) : undefined; }; return xhr.send(null); } constructor(data1) { let i; this.data = data1; this.pos = 8; // Skip the default header this.palette = []; this.imgData = []; this.transparency = {}; this.animation = null; this.text = {}; let frame = null; while (true) { var data; let chunkSize = this.readUInt32(); let section = ''; for (i = 0; i < 4; i++) { section += String.fromCharCode(this.data[this.pos++]); } switch (section) { case 'IHDR': // we can grab interesting values from here (like width, height, etc) this.width = this.readUInt32(); this.height = this.readUInt32(); this.bits = this.data[this.pos++]; this.colorType = this.data[this.pos++]; this.compressionMethod = this.data[this.pos++]; this.filterMethod = this.data[this.pos++]; this.interlaceMethod = this.data[this.pos++]; break; case 'acTL': // we have an animated PNG this.animation = { numFrames: this.readUInt32(), numPlays: this.readUInt32() || Infinity, frames: [] }; break; case 'PLTE': this.palette = this.read(chunkSize); break; case 'fcTL': if (frame) { this.animation.frames.push(frame); } this.pos += 4; // skip sequence number frame = { width: this.readUInt32(), height: this.readUInt32(), xOffset: this.readUInt32(), yOffset: this.readUInt32() }; var delayNum = this.readUInt16(); var delayDen = this.readUInt16() || 100; frame.delay = (1000 * delayNum) / delayDen; frame.disposeOp = this.data[this.pos++]; frame.blendOp = this.data[this.pos++]; frame.data = []; break; case 'IDAT': case 'fdAT': if (section === 'fdAT') { this.pos += 4; // skip sequence number chunkSize -= 4; } data = (frame && frame.data) || this.imgData; for (i = 0; i < chunkSize; i++) { data.push(this.data[this.pos++]); } break; case 'tRNS': // This chunk can only occur once and it must occur after the // PLTE chunk and before the IDAT chunk. this.transparency = {}; switch (this.colorType) { case 3: // Indexed color, RGB. Each byte in this chunk is an alpha for // the palette index in the PLTE ("palette") chunk up until the // last non-opaque entry. Set up an array, stretching over all // palette entries which will be 0 (opaque) or 1 (transparent). this.transparency.indexed = this.read(chunkSize); var short = 255 - this.transparency.indexed.length; if (short > 0) { for (i = 0; i < short; i++) { this.transparency.indexed.push(255); } } break; case 0: // Greyscale. Corresponding to entries in the PLTE chunk. // Grey is two bytes, range 0 .. (2 ^ bit-depth) - 1 this.transparency.grayscale = this.read(chunkSize)[0]; break; case 2: // True color with proper alpha channel. this.transparency.rgb = this.read(chunkSize); break; } break; case 'tEXt': var text = this.read(chunkSize); var index = text.indexOf(0); var key = String.fromCharCode.apply(String, text.slice(0, index)); this.text[key] = String.fromCharCode.apply( String, text.slice(index + 1) ); break; case 'IEND': if (frame) { this.animation.frames.push(frame); } // we've got everything we need! switch (this.colorType) { case 0: case 3: case 4: this.colors = 1; break; case 2: case 6: this.colors = 3; break; } this.hasAlphaChannel = [4, 6].includes(this.colorType); var colors = this.colors + (this.hasAlphaChannel ? 
1 : 0); this.pixelBitlength = this.bits * colors; switch (this.colors) { case 1: this.colorSpace = 'DeviceGray'; break; case 3: this.colorSpace = 'DeviceRGB'; break; } this.imgData = new Uint8Array(this.imgData); return; break; default: // unknown (or unimportant) section, skip it this.pos += chunkSize; } this.pos += 4; // Skip the CRC if (this.pos > this.data.length) { throw new Error('Incomplete or corrupt PNG file'); } } } read(bytes) { const result = new Array(bytes); for (let i = 0; i < bytes; i++) { result[i] = this.data[this.pos++]; } return result; } readUInt32() { const b1 = this.data[this.pos++] << 24; const b2 = this.data[this.pos++] << 16; const b3 = this.data[this.pos++] << 8; const b4 = this.data[this.pos++]; return b1 | b2 | b3 | b4; } readUInt16() { const b1 = this.data[this.pos++] << 8; const b2 = this.data[this.pos++]; return b1 | b2; } decodePixels(data) { if (data == null) { data = this.imgData; } if (data.length === 0) { return new Uint8Array(0); } data = new FlateStream(data); data = data.getBytes(); const { width, height } = this; const pixelBytes = this.pixelBitlength / 8; const pixels = new Uint8Array(width * height * pixelBytes); const { length } = data; let pos = 0; function pass(x0, y0, dx, dy, singlePass = false) { const w = Math.ceil((width - x0) / dx); const h = Math.ceil((height - y0) / dy); const scanlineLength = pixelBytes * w; const buffer = singlePass ? pixels : new Uint8Array(scanlineLength * h); let row = 0; let c = 0; while (row < h && pos < length) { var byte, col, i, left, upper; switch (data[pos++]) { case 0: // None for (i = 0; i < scanlineLength; i++) { buffer[c++] = data[pos++]; } break; case 1: // Sub for (i = 0; i < scanlineLength; i++) { byte = data[pos++]; left = i < pixelBytes ? 0 : buffer[c - pixelBytes]; buffer[c++] = (byte + left) % 256; } break; case 2: // Up for (i = 0; i < scanlineLength; i++) { byte = data[pos++]; col = (i - (i % pixelBytes)) / pixelBytes; upper = row && buffer[ (row - 1) * scanlineLength + col * pixelBytes + (i % pixelBytes) ]; buffer[c++] = (upper + byte) % 256; } break; case 3: // Average for (i = 0; i < scanlineLength; i++) { byte = data[pos++]; col = (i - (i % pixelBytes)) / pixelBytes; left = i < pixelBytes ? 0 : buffer[c - pixelBytes]; upper = row && buffer[ (row - 1) * scanlineLength + col * pixelBytes + (i % pixelBytes) ]; buffer[c++] = (byte + Math.floor((left + upper) / 2)) % 256; } break; case 4: // Paeth for (i = 0; i < scanlineLength; i++) { var paeth, upperLeft; byte = data[pos++]; col = (i - (i % pixelBytes)) / pixelBytes; left = i < pixelBytes ? 
0 : buffer[c - pixelBytes]; if (row === 0) { upper = upperLeft = 0; } else { upper = buffer[ (row - 1) * scanlineLength + col * pixelBytes + (i % pixelBytes) ]; upperLeft = col && buffer[ (row - 1) * scanlineLength + (col - 1) * pixelBytes + (i % pixelBytes) ]; } const p = left + upper - upperLeft; const pa = Math.abs(p - left); const pb = Math.abs(p - upper); const pc = Math.abs(p - upperLeft); if (pa <= pb && pa <= pc) { paeth = left; } else if (pb <= pc) { paeth = upper; } else { paeth = upperLeft; } buffer[c++] = (byte + paeth) % 256; } break; default: throw new Error(`Invalid filter algorithm: ${data[pos - 1]}`); } if (!singlePass) { let pixelsPos = ((y0 + row * dy) * width + x0) * pixelBytes; let bufferPos = row * scanlineLength; for (i = 0; i < w; i++) { for (let j = 0; j < pixelBytes; j++) pixels[pixelsPos++] = buffer[bufferPos++]; pixelsPos += (dx - 1) * pixelBytes; } } row++; } } if (this.interlaceMethod === 1) { /* 1 6 4 6 2 6 4 6 7 7 7 7 7 7 7 7 5 6 5 6 5 6 5 6 7 7 7 7 7 7 7 7 3 6 4 6 3 6 4 6 7 7 7 7 7 7 7 7 5 6 5 6 5 6 5 6 7 7 7 7 7 7 7 7 */ pass(0, 0, 8, 8); // 1 pass(4, 0, 8, 8); // 2 pass(0, 4, 4, 8); // 3 pass(2, 0, 4, 4); // 4 pass(0, 2, 2, 4); // 5 pass(1, 0, 2, 2); // 6 pass(0, 1, 1, 2); // 7 } else { pass(0, 0, 1, 1, true); } return pixels; } decodePalette() { const { palette } = this; const { length } = palette; const transparency = this.transparency.indexed || []; const ret = new Uint8Array((transparency.length || 0) + length); let pos = 0; let c = 0; for (let i = 0; i < length; i += 3) { var left; ret[pos++] = palette[i]; ret[pos++] = palette[i + 1]; ret[pos++] = palette[i + 2]; ret[pos++] = (left = transparency[c++]) != null ? left : 255; } return ret; } copyToImageData(imageData, pixels) { let j, k; let { colors } = this; let palette = null; let alpha = this.hasAlphaChannel; if (this.palette.length) { palette = this._decodedPalette || (this._decodedPalette = this.decodePalette()); colors = 4; alpha = true; } const data = imageData.data || imageData; const { length } = data; const input = palette || pixels; let i = (j = 0); if (colors === 1) { while (i < length) { k = palette ? pixels[i / 4] * 4 : j; const v = input[k++]; data[i++] = v; data[i++] = v; data[i++] = v; data[i++] = alpha ? input[k++] : 255; j = k; } } else { while (i < length) { k = palette ? pixels[i / 4] * 4 : j; data[i++] = input[k++]; data[i++] = input[k++]; data[i++] = input[k++]; data[i++] = alpha ? 
input[k++] : 255; j = k; } } } decode() { const ret = new Uint8Array(this.width * this.height * 4); this.copyToImageData(ret, this.decodePixels()); return ret; } renderFrame(ctx, number) { const { frames } = this.animation; const frame = frames[number]; const prev = frames[number - 1]; // if we're on the first frame, clear the canvas if (number === 0) { ctx.clearRect(0, 0, this.width, this.height); } // check the previous frame's dispose operation if ((prev && prev.disposeOp) === APNG_DISPOSE_OP_BACKGROUND) { ctx.clearRect(prev.xOffset, prev.yOffset, prev.width, prev.height); } else if ((prev && prev.disposeOp) === APNG_DISPOSE_OP_PREVIOUS) { ctx.putImageData(prev.imageData, prev.xOffset, prev.yOffset); } // APNG_BLEND_OP_SOURCE overwrites the previous data if (frame.blendOp === APNG_BLEND_OP_SOURCE) { ctx.clearRect(frame.xOffset, frame.yOffset, frame.width, frame.height); } // draw the current frame return ctx.drawImage(frame.image, frame.xOffset, frame.yOffset); } animate(ctx) { let frameNumber = 0; const { numFrames, frames, numPlays } = this.animation; const doFrame = () => { const f = frameNumber++ % numFrames; const frame = frames[f]; this.renderFrame(ctx, f); if (numFrames > 1 && frameNumber / numFrames < numPlays) { this.animation._timeout = setTimeout(doFrame, frame.delay); } }; doFrame(); } stopAnimation() { return clearTimeout(this.animation && this.animation._timeout); } render(canvas) { // if this canvas was displaying another image before, // stop the animation on it if (canvas._png) { canvas._png.stopAnimation(); } canvas._png = this; canvas.width = this.width; canvas.height = this.height; const ctx = canvas.getContext('2d'); if (this.animation) { this.decodeFrames(ctx); return this.animate(ctx); } else { const data = ctx.createImageData(this.width, this.height); this.copyToImageData(data, this.decodePixels()); return ctx.putImageData(data, 0, 0); } } } return PNG; })(); // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Description of this file. */ function createSceneSpec(sceneParams) { return { assetType: 'scene', occupancyGridsSpec: createOccupancyGridsSpec(sceneParams), distanceGridsSpec: createDistanceGridsSpec(sceneParams), triplaneSpec: createTriplaneSpec(sceneParams), sparseGridSpec: createSparseGridSpec(sceneParams), }; } function createOccupancyGridsSpec(sceneParams) { // Decide on which resolutions to load. let blockSizes = [8, 16, 32, 64, 128]; if (sceneParams['useBits']) { blockSizes = [8, 32, 128]; } if (sceneParams['useDistanceGrid']) { blockSizes = [8]; } // Create one spec per occupancy grid. let occupancyGridSpecs = blockSizes.map(blockSize => { return createOccupancyGridSpec(sceneParams, 'occupancy_grid', blockSize); }) // Create one spec for all occupancy grids. 
  return {
    assetType: 'occupancy_grids',
    gridSpecs: occupancyGridSpecs,
    blockSizes: blockSizes
  };
}

function createDistanceGridsSpec(sceneParams) {
  if (!sceneParams['useDistanceGrid']) {
    return {assetType: 'distance_grids', gridSpecs: [], blockSizes: []};
  }
  let blockSize = 8;
  // Create one spec for all distance grids.
  return {
    assetType: 'distance_grids',
    gridSpecs:
        [createOccupancyGridSpec(sceneParams, 'distance_grid', blockSize)],
    blockSizes: [blockSize],
  };
}

function createSparseGridSpec(sceneParams) {
  const fileExtension = sceneParams['export_array_format'] || 'png';
  // Are RGB and density stored as two separate images?
  let separateRgbAndDensity = getFieldOrDefault(
      sceneParams, 'export_store_rgb_and_density_separately', false);
  let result = {
    assetType: 'sparse_grid',
    blockIndicesSpec: createSparseGridBlockIndicesSpec(sceneParams),
    separateRgbAndDensity: separateRgbAndDensity,
  };
  if (separateRgbAndDensity) {
    result.rgbSpec =
        createSparseGridAssetSpec(sceneParams, 'sparse_grid_rgb', 3);
    result.densitySpec =
        createSparseGridAssetSpec(sceneParams, 'sparse_grid_density', 2);
    // If RGB and density are stored separately, the final channel from
    // features is omitted. It's stored as the second channel in density
    // instead.
    result.featuresSpec =
        createSparseGridAssetSpec(sceneParams, 'sparse_grid_features', 3);
  } else {
    result.rgbAndDensitySpec = createSparseGridAssetSpec(
        sceneParams, 'sparse_grid_rgb_and_density', 4);
    result.featuresSpec =
        createSparseGridAssetSpec(sceneParams, 'sparse_grid_features', 4);
  }
  return result;
}

function createSparseGridBlockIndicesSpec(sceneParams) {
  const fileExtension = sceneParams['export_array_format'] || 'png';
  let gridSize =
      sceneParams['sparse_grid_resolution'] / sceneParams['data_block_size'];
  return {
    assetType: 'sparse_grid_block_indices',
    filename: `sparse_grid_block_indices.${fileExtension}`,
    shape: [gridSize, gridSize, gridSize],
    numChannels: 3,
  };
}

function createSparseGridAssetSpec(sceneParams, prefix, numChannels) {
  const fileExtension = sceneParams['export_array_format'] || 'png';
  let numSlices = sceneParams['num_slices'];
  let width = sceneParams['atlas_width'];
  let height = sceneParams['atlas_height'];
  let depth = sceneParams['atlas_depth'];
  let sliceDepth = Math.ceil(depth / numSlices);

  // Create a spec for each slice.
  let sliceSpecs = [];
  for (let i = 0; i < numSlices; ++i) {
    const sliceIndex = digits(i, 3);
    const filename = `${prefix}_${sliceIndex}.${fileExtension}`;
    sliceSpecs.push({
      assetType: `${prefix}_slice`,
      shape: [width, height, sliceDepth],
      numChannels: numChannels,
      sliceIndex: i,
      numSlices: numSlices,
      filename: filename,
    });
  }

  // Create a spec for all slices.
  return {
    assetType: `${prefix}_slices`,
    shape: [width, height, depth],
    numChannels: numChannels,
    sliceSpecs: sliceSpecs,
    numSlices: numSlices,
    mergeSlices: getMergeSlices(sceneParams),
  };
}

/**
 * Creates a spec for a potentially-sliced grid texture.
 */
function createOccupancyGridSpec(sceneParams, prefix, blockSize) {
  // 3D grids with a resolution higher than 256^3 are split into 8 slices
  // along the depth dimension.
  const kMaxNonSlicedVolumeSize = 256;
  const kNumEmptySpaceCharts = 8;
  const fileExtension = sceneParams['export_array_format'] || 'png';
  const resolutionToUse = sceneParams['triplane_resolution'];
  const voxelSizeToUse = sceneParams['triplane_voxel_size'];

  // Number of voxels in each dimension.
  const gridSize = Math.ceil(resolutionToUse / blockSize);

  // Side-length of an occupancy grid voxel in squash coordinates.
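  // (Each occupancy-grid voxel covers a blockSize^3 cube of triplane voxels,
  // hence the multiplication below.)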
const voxelSize = voxelSizeToUse * blockSize; // Determine if grid is sliced or not. let exportSlicedGrids = getFieldOrDefault( sceneParams, 'export_slice_occupancy_and_distance_grids', true); const isSliced = (exportSlicedGrids && gridSize > kMaxNonSlicedVolumeSize); // Determine the number of color channels in the grid. let isPadded = getFieldOrDefault( sceneParams, 'export_pad_occupancy_and_distance_grids', true); let numChannels = isPadded ? 4 : 1; let sliceSpecs = []; if (sceneParams['legacyGrids'] || !isSliced) { // Grid is contained entirely within one file. let filename = `${prefix}_${blockSize}.${fileExtension}`; if (!sceneParams['legacyGrids']) { filename = `${prefix}_${blockSize}_000.${fileExtension}`; } sliceSpecs.push({ assetType: `${prefix}_slice`, shape: [gridSize, gridSize, gridSize], numChannels: numChannels, sliceIndex: 0, numSlices: 1, filename: filename, }); } else { // Grid is split across several different files. const sliceDepth = Math.ceil(gridSize / kNumEmptySpaceCharts); // Create a spec for each slice. for (let i = 0; i < kNumEmptySpaceCharts; ++i) { const sliceIndex = digits(i, 3); let filename = `${prefix}_${blockSize}_${sliceIndex}.${fileExtension}`; sliceSpecs.push({ assetType: `${prefix}_slice`, shape: [gridSize, gridSize, sliceDepth], numChannels: numChannels, sliceIndex: i, numSlices: kNumEmptySpaceCharts, filename: filename, }); } } // Create a spec for all slices. return { assetType: `${prefix}_slices`, shape: [gridSize, gridSize, gridSize], numChannels: numChannels, voxelSize: voxelSize, blockSize: blockSize, sliceSpecs: sliceSpecs, numSlices: kNumEmptySpaceCharts, mergeSlices: getMergeSlices(sceneParams), }; } function createTriplaneSpec(sceneParams) { const gridSize = sceneParams['triplane_resolution']; const voxelSize = sceneParams['triplane_voxel_size']; let separateRgbAndDensity = getFieldOrDefault( sceneParams, 'export_store_rgb_and_density_separately', false); let result = { assetType: 'triplane', shape: [gridSize, gridSize, 3], numSlices: 3, voxelSize: voxelSize, separateRgbAndDensity: separateRgbAndDensity, featuresSpec: createTriplaneSlicesSpec(sceneParams, 'triplane_features', 4), }; if (result.separateRgbAndDensity) { result.rgbSpec = createTriplaneSlicesSpec(sceneParams, 'triplane_rgb', 3); result.densitySpec = createTriplaneSlicesSpec(sceneParams, 'triplane_density', 1); } else { result.rgbAndDensitySpec = createTriplaneSlicesSpec(sceneParams, 'triplane_rgb_and_density', 4); } return result; } function createTriplaneSlicesSpec(sceneParams, prefix, numChannels) { const gridSize = sceneParams['triplane_resolution']; return { assetType: `${prefix}_slices`, shape: [gridSize, gridSize, 3], numChannels: numChannels, numSlices: 3, mergeSlices: getMergeSlices(sceneParams), sliceSpecs: range(3).map( (i) => createPlaneSliceSpec(sceneParams, prefix, numChannels, i)), }; } function createPlaneSliceSpec(sceneParams, prefix, numChannels, sliceIndex) { const fileExtension = sceneParams['export_array_format'] || 'png'; const gridSize = sceneParams['triplane_resolution']; // Filenames start with "plane", not "triplane". let filenamePrefix = prefix.replace(/^triplane_/, 'plane_'); return { assetType: `${prefix}_slice`, shape: [gridSize, gridSize, 1], numChannels: numChannels, sliceIndex: sliceIndex, numSlices: 3, filename: `${filenamePrefix}_${sliceIndex}.${fileExtension}`, }; } function getMergeSlices(sceneParams) { // Slices can only be merged if rgb and density are stored separately. 
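  // With combined "rgb_and_density" images this returns false, and density is
  // instead recovered from the alpha channel via mergeSparseGridDensity.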
  const separateRgbAndDensity = getFieldOrDefault(
      sceneParams, 'export_store_rgb_and_density_separately', false);
  const mergeSlices =
      getFieldOrDefault(sceneParams, 'merge_slices', !separateRgbAndDensity);
  if (!separateRgbAndDensity && !mergeSlices) {
    throw new Error(
        'Slices must be merged when using "rgb_and_density" images. Please ' +
        're-export with export_store_rgb_and_density_separately=true and try ' +
        'again.');
  }
  return mergeSlices && separateRgbAndDensity;
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Utilities for fetching assets.
 */

const baseGoogleApiUrl =
    'https://firebasestorage.googleapis.com/v0/b/test3-2d896.appspot.com/o';

// Centralized token management.
let gcsToken = null;

function fetchGcsToken() {
  if (gcsToken) {
    return gcsToken;
  }
  gcsToken = fetch('http://localhost:3001/get-gcs-token')
    .then(response => {
      if (!response.ok) {
        throw new Error(`Error fetching the GCS token: ${response.status}`);
      }
      return response.json();
    })
    .then(data => {
      gcsToken = data.token;
      // Reset after resolution so a fresh token can be fetched later if
      // needed. The token expires after 14 minutes.
      setTimeout(() => { gcsToken = null; }, 14 * 60 * 1000);
      return gcsToken;
    })
    .catch(error => {
      console.error('Error fetching the GCS token:', error);
      gcsToken = null;  // On error, reset to allow another attempt.
      throw error;
    });
  return gcsToken;
}

/**
 * Fetches every asset in a scene.
 * @param {*} spec
 * @param {*} router
 * @returns
 */
function fetchScene(spec, router) {
  return {
    ...spec,
    occupancyGridsAsset: fetchAsset(spec.occupancyGridsSpec, router),
    distanceGridsAsset: fetchAsset(spec.distanceGridsSpec, router),
    triplaneAsset: fetchAsset(spec.triplaneSpec, router),
    sparseGridAsset: fetchAsset(spec.sparseGridSpec, router),
  };
}

/**
 * Fetch a set of occupancy grids.
 * @param {*} spec
 * @param {*} router
 * @returns
 */
function fetchOccupancyGrids(spec, router) {
  let gridAssets =
      spec.gridSpecs.map((gridSpec) => fetchAsset(gridSpec, router));
  return { ...spec, gridAssets: gridAssets };
}

/**
 * Fetch assets for a sliced grid.
 * @param {*} spec
 * @param {*} router
 * @returns
 */
function fetchSlices(spec, router) {
  let sliceAssets =
      spec.sliceSpecs.map((sliceSpec) => fetchAsset(sliceSpec, router));
  return { ...spec, sliceAssets: sliceAssets };
}

/**
 * Fetch triplane representation.
 * @param {*} spec
 * @param {*} router
 * @returns
 */
function fetchTriplane(spec, router) {
  let result = {
    ...spec,
    featuresAsset: fetchAsset(spec.featuresSpec, router)
  };
  if (spec.separateRgbAndDensity) {
    result.rgbAsset = fetchAsset(spec.rgbSpec, router);
    result.densityAsset = fetchAsset(spec.densitySpec, router);
  } else {
    result.rgbAndDensityAsset = fetchAsset(spec.rgbAndDensitySpec, router);
  }
  return result;
}

/**
 * Fetch a flat, monolithic array from GCS.
 * @param {*} spec - The specification for the asset.
 * @param {Router} router - Router object that contains the directory URL
 *     information.
 * @returns {object} The spec object with the fetched asset.
 */
function fetchArray(spec, router) {
  // Extract dirUrl from the Router object.
  const dirUrl = router.dirUrl;

  // Check that dirUrl is a valid string.
  if (typeof dirUrl !== 'string') {
    console.error('dirUrl is not a valid string:', dirUrl);
    throw new Error('dirUrl must be a valid string');
  }

  // Perform validation on the spec.
  if (!spec.filename) {
    console.error('Spec missing filename:', spec);
    throw new Error('Spec must contain a valid filename');
  }

  // Clean the path to remove relative segments (`../`).
  const cleanedDirUrl = cleanPath(dirUrl);

  // Strip a trailing `.gz` suffix from the filename, if present.
  const cleanedFilename = spec.filename.endsWith('.gz') ?
      spec.filename.slice(0, -3) : spec.filename;

  // URL-encode the directory portion; the filename is used as-is.
  const encodedDirUrl = encodeURIComponent(cleanedDirUrl);
  const encodedFilename = cleanedFilename;

  // Build the full URL for accessing Google Cloud Storage. Note: this assumes
  // fetchGcsToken() has already resolved, so that gcsToken holds the token
  // string rather than a pending promise.
  const fullUrl = `${baseGoogleApiUrl}/users%2Fvisite_3D%2F${encodedDirUrl}%2F${encodedFilename}?alt=media&token=${gcsToken}`;
  console.log('Fetching from URL:', fullUrl);

  // Make the request to load the asset.
  const asset = loadAsset(fullUrl).then(validateSize(spec)).then(onImageLoaded);
  return { ...spec, asset: asset };
}

/**
 * Validate the size of the fetched asset.
 * @param {*} spec
 * @returns {function}
 */
function validateSize(spec) {
  return (assetBuffer) => {
    const expectedSize = product(spec.shape) * spec.numChannels;
    console.assert(
        assetBuffer.length === expectedSize,
        `Size mismatch for ${spec.filename}`, spec, assetBuffer);
    return assetBuffer;
  };
}

/**
 * Cleans the given path by removing any `../` or `./` segments.
 * @param {string} path - The path to be cleaned.
 * @return {string} The cleaned path.
 */
function cleanPath(path) {
  const parts = path.split('/');
  const stack = [];
  for (let i = 0; i < parts.length; i++) {
    if (parts[i] === '..') {
      if (stack.length) {
        stack.pop();
      }
    } else if (parts[i] !== '.' && parts[i] !== '') {
      stack.push(parts[i]);
    }
  }
  return stack.join('/');
}

/**
 * Fetches sparse grid assets from GCS.
 * @param {*} spec
 * @param {*} router
 * @returns
 */
function fetchSparseGrid(spec, router) {
  let result = {
    ...spec,
    blockIndicesAsset: fetchAsset(spec.blockIndicesSpec, router),
    featuresAsset: fetchAsset(spec.featuresSpec, router),
  };
  if (spec.separateRgbAndDensity) {
    result.rgbAsset = fetchAsset(spec.rgbSpec, router);
    result.densityAsset = fetchAsset(spec.densitySpec, router);
  } else {
    result.rgbAndDensityAsset = fetchAsset(spec.rgbAndDensitySpec, router);
  }
  return result;
}

/**
 * Report that no fetch function is available.
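 * @param {*} spec Spec whose assetType has no registered fetch function.
 * @param {*} router Unused.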
*/ function notImplementedError(spec, router) { console.error(`${spec.assetType} is not yet implemented`, spec); } const gFetchRegistry = { 'scene': fetchScene, // triplane 'triplane': fetchTriplane, 'triplane_rgb_and_density_slices': fetchSlices, 'triplane_rgb_and_density_slice': fetchArray, 'triplane_rgb_slices': fetchSlices, 'triplane_rgb_slice': fetchArray, 'triplane_density_slices': fetchSlices, 'triplane_density_slice': fetchArray, 'triplane_features_slices': fetchSlices, 'triplane_features_slice': fetchArray, // distance grids 'distance_grids': fetchOccupancyGrids, 'distance_grid_slices': fetchSlices, 'distance_grid_slice': fetchArray, // occupancy grids 'occupancy_grids': fetchOccupancyGrids, 'occupancy_grid_slices': fetchSlices, 'occupancy_grid_slice': fetchArray, // sparse grid 'sparse_grid': fetchSparseGrid, 'sparse_grid_block_indices': fetchArray, 'sparse_grid_rgb_and_density_slices': fetchSlices, 'sparse_grid_rgb_and_density_slice': fetchArray, 'sparse_grid_rgb_slices': fetchSlices, 'sparse_grid_rgb_slice': fetchArray, 'sparse_grid_density_slices': fetchSlices, 'sparse_grid_density_slice': fetchArray, 'sparse_grid_features_slices': fetchSlices, 'sparse_grid_features_slice': fetchArray, }; function fetchAsset(spec, router) { let fetchFn = gFetchRegistry[spec.assetType]; if (fetchFn == undefined) { console.error(`Failed to find fetchFn for assetType ${spec.assetType}`, spec); } return fetchFn(spec, router); } // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Utilities for creating and initializing texture buffers. */ /** * Creates an empty volume texture. * * @param {number} width Width of the texture * @param {number} height Height of the texture * @param {number} depth Depth of the texture * @param {number} format Format of the texture * @param {number} filter Filter strategy of the texture * @return {!THREE.DataTexture3D} Volume texture */ function createEmptyVolumeTexture(width, height, depth, format, filter) { let volumeTexture = new THREE.DataTexture3D(null, width, height, depth); volumeTexture.internalFormat = getInternalFormat(format); volumeTexture.format = format; volumeTexture.generateMipmaps = false; volumeTexture.magFilter = volumeTexture.minFilter = filter; volumeTexture.wrapS = volumeTexture.wrapT = volumeTexture.wrapR = THREE.ClampToEdgeWrapping; volumeTexture.type = THREE.UnsignedByteType; volumeTexture.unpackAlignment = 1; gRenderer.initTexture(volumeTexture); return volumeTexture; } /** * Creates three empty, equally-sized textures to hold triplanes. 
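 * Note: this returns a single THREE.DataTexture2DArray; the `depth` argument
 * is the number of array layers (three, one per plane).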
* * @param {number} width Width of the texture * @param {number} height Height of the texture * @param {number} format Format of the texture * @return {!THREE.DataTexture2DArray} Texture array of size three */ function createEmptyTriplaneTextureArray(width, height, depth, format) { let texture = new THREE.DataTexture2DArray(null, width, height, depth); texture.internalFormat = getInternalFormat(format); texture.format = format; texture.generateMipmaps = false; texture.magFilter = texture.minFilter = THREE.LinearFilter; texture.wrapS = texture.wrapT = texture.wrapR = THREE.ClampToEdgeWrapping; texture.type = THREE.UnsignedByteType; texture.unpackAlignment = 1; gRenderer.initTexture(texture); return texture; } function getInternalFormat(format) { if (format == THREE.RedFormat) { return 'R8'; } else if (format == THREE.LuminanceAlphaFormat) { return 'LUMINANCE_ALPHA'; } else if (format == THREE.RGBFormat) { return 'RGB'; } else if (format == THREE.RGBAFormat) { return 'RGBA'; } throw new Error(`Unrecognized THREE.js format: ${format}`); } function createEmptySceneTexture(spec) { return { textureType: 'scene', occupancyGridsTexture: createEmptyTexture(spec.occupancyGridsSpec), distanceGridsTexture: createEmptyTexture(spec.distanceGridsSpec), triplaneTexture: createEmptyTexture(spec.triplaneSpec), sparseGridTexture: createEmptyTexture(spec.sparseGridSpec), }; } function createEmptyOccupancyGridsTexture(spec) { let textureType = spec.assetType.replace(/_slices$/, ''); let gridTextures = spec.gridSpecs.map(createEmptyTexture); return {textureType, gridTextures}; } function createEmptyOccupancyGridTexture(spec) { let textureType = spec.assetType.replace(/_slices$/, ''); let texture = createEmptyVolumeTexture( ...spec.shape, THREE.RedFormat, THREE.NearestFilter); return {textureType, texture}; } function createEmptyTriplaneTexture(spec) { let result = { textureType: 'triplane', featuresTexture: createEmptyTexture(spec.featuresSpec), }; if (spec.separateRgbAndDensity) { result.rgbTexture = createEmptyTexture(spec.rgbSpec); result.densityTexture = createEmptyTexture(spec.densitySpec); } else { let shape = spec.rgbAndDensitySpec.shape; result.rgbTexture = createEmptyTexture({ assetType: 'triplane_rgb_slices', shape: shape, }); result.densityTexture = createEmptyTexture({ assetType: 'triplane_density_slices', shape: shape, }); } return result; } function createEmptyTriplaneSlicesTexture(spec) { let textureType = spec.assetType.replace(/_slices$/, ''); let format = { 'triplane_density': THREE.RedFormat, 'triplane_rgb': THREE.RGBFormat, 'triplane_features': THREE.RGBAFormat, }[textureType]; console.assert(format != undefined, spec); let texture = createEmptyTriplaneTextureArray(...spec.shape, format); return { textureType, texture }; } function createEmptySparseGridTexture(spec) { let _createEmptyAtlasVolumeTexture = (spec, format) => { return createEmptyVolumeTexture(...spec.shape, format, THREE.LinearFilter); }; // Determine which spec to use for rgb and density. This will change // depending on which assets were generated. let rgbSpec = spec.separateRgbAndDensity ? spec.rgbSpec : spec.rgbAndDensitySpec; let densitySpec = spec.separateRgbAndDensity ? 
spec.densitySpec : spec.rgbAndDensitySpec; let sparseGridRgbTexture = _createEmptyAtlasVolumeTexture(rgbSpec, THREE.RGBFormat); let sparseGridDensityTexture = _createEmptyAtlasVolumeTexture(densitySpec, THREE.LuminanceAlphaFormat); let sparseGridFeaturesTexture = _createEmptyAtlasVolumeTexture(spec.featuresSpec, THREE.RGBFormat); // The indirection grid uses nearest filtering and is loaded in one go. // uint8[64,64,64], 3 bytes per entry let sparseGridBlockIndicesTexture = createEmptyVolumeTexture( ...spec.blockIndicesSpec.shape, THREE.RGBFormat, THREE.NearestFilter); // Update texture buffer for sparse_grid_block_indices. return { textureType: 'sparse_grid', blockIndicesTexture: { textureType: 'sparse_grid_block_indices', texture: sparseGridBlockIndicesTexture, }, rgbTexture: { textureType: 'sparse_grid_rgb', texture: sparseGridRgbTexture, }, densityTexture: { textureType: 'sparse_grid_density', texture: sparseGridDensityTexture, }, featuresTexture: { textureType: 'sparse_grid_features', texture: sparseGridFeaturesTexture, }, }; } const gCreateEmptyTextureRegistry = { 'scene': createEmptySceneTexture, // triplane 'triplane': createEmptyTriplaneTexture, 'triplane_rgb_slices': createEmptyTriplaneSlicesTexture, 'triplane_density_slices': createEmptyTriplaneSlicesTexture, 'triplane_features_slices': createEmptyTriplaneSlicesTexture, // distance grids 'distance_grids': createEmptyOccupancyGridsTexture, 'distance_grid_slices': createEmptyOccupancyGridTexture, // occupancy grids 'occupancy_grids': createEmptyOccupancyGridsTexture, 'occupancy_grid_slices': createEmptyOccupancyGridTexture, // sparse grid 'sparse_grid': createEmptySparseGridTexture, }; function createEmptyTexture(spec) { let loadFn = gCreateEmptyTextureRegistry[spec.assetType]; if (loadFn == undefined) { console.error( `Failed to find loadFn for assetType ${spec.assetType}`, spec); } return loadFn(spec); } // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Populate WebGL textures with assets. */ /** * Populates all textures in a scene. 
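 * More precisely, this builds the CPU-side payload for each texture in the
 * scene; the actual WebGL upload happens later in populateTexture.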
*/ function prepareScenePayload(asset) { return { textureType: 'scene', occupancyGridsPayload: prepareTexturePayload(asset.occupancyGridsAsset), distanceGridsPayload: prepareTexturePayload(asset.distanceGridsAsset), triplanePayload: prepareTexturePayload(asset.triplaneAsset), sparseGridPayload: prepareTexturePayload(asset.sparseGridAsset), } } function prepareOccupancyGridsPayload(asset) { let textureType = asset.assetType; let gridPayloads = asset.gridAssets.map(prepareTexturePayload); return {textureType, gridPayloads}; } function prepareOccupancyGridPayload(asset) { if (asset.mergeSlices) { return prepareOccupancyGridMergedPayload(asset); } else { return prepareOccupancyGridSlicesPayload(asset); } } function prepareOccupancyGridMergedPayload(asset) { console.assert(asset.assetType.endsWith('_slices'), asset); let textureType = asset.assetType.replace(/_slices$/, ''); let shape = asset.shape; let numChannels = asset.numChannels; let src = { 1: GridTextureSource.RED_FROM_RED, 4: GridTextureSource.ALPHA_FROM_RGBA, }[asset.numChannels]; let payload = mergeSlices(asset, src, GridTextureDestination.RED_IN_RED); return {textureType, shape, numChannels, payload}; } function prepareOccupancyGridSlicesPayload(asset) { console.assert(asset.assetType.endsWith('_slices'), asset); let textureType = asset.assetType.replace(/_slices$/, ''); return { textureType: textureType, shape: asset.shape, numChannels: asset.numChannels, slicePayloads: asset.sliceAssets.map(prepareTexturePayload), }; } function prepareOccupancyGridSlicePayload(asset) { console.assert(asset.assetType.endsWith('_slice'), asset); let payload = null; if (asset.numChannels == 1) { payload = asset.asset; } else if (asset.numChannels == 4) { payload = mergeSlices( { shape: asset.shape, numSlices: 1, sliceAssets: [{...asset, sliceIndex: 0, numSlices: 1}], }, GridTextureSource.ALPHA_FROM_RGBA, GridTextureDestination.RED_IN_RED, ); } else { throw new Error('Unrecognized number of input channels', asset); } return { textureType: asset.assetType, shape: asset.shape, numChannels: asset.numChannels, sliceIndex: asset.sliceIndex, numSlices: asset.numSlices, payload: payload, }; } function prepareTriplanePayload(asset) { let result = { textureType: 'triplane', featuresPayload: preparePlanePayload( asset.featuresAsset, 'triplane_features', GridTextureSource.RGBA_FROM_RGBA, GridTextureDestination.RGBA_IN_RGBA, ), }; if (asset.separateRgbAndDensity) { result.rgbPayload = preparePlanePayload( asset.rgbAsset, 'triplane_rgb', GridTextureSource.RGB_FROM_RGB, GridTextureDestination.RGB_IN_RGB, ); result.densityPayload = preparePlanePayload( asset.densityAsset, 'triplane_density', GridTextureSource.RED_FROM_RED, GridTextureDestination.RED_IN_RED, ); } else { result.rgbPayload = preparePlanePayload( asset.rgbAndDensityAsset, 'triplane_rgb', GridTextureSource.RGB_FROM_RGBA, GridTextureDestination.RGB_IN_RGB, ); result.densityPayload = preparePlanePayload( asset.rgbAndDensityAsset, 'triplane_density', GridTextureSource.ALPHA_FROM_RGBA, GridTextureDestination.RED_IN_RED, ); } return result; } function preparePlanePayload(asset, dstKey, src, dst) { let result = { textureType: dstKey, shape: asset.shape, numChannels: asset.numChannels, }; if (asset.mergeSlices) { result.payload = mergeSlices(asset, src, dst); } else { result.slicePayloads = asset.sliceAssets.map( (sliceAsset) => preparePlaneSlicePayload(sliceAsset, src, dst)); } return result; } function preparePlaneSlicePayload(asset, src, dst) { let payload = null; if (src.format == dst.format && src.channels 
== dst.channels) { payload = asset.asset; } else { payload = mergeSlices( { shape: asset.shape, numSlices: 1, sliceAssets: [{...asset, sliceIndex: 0, numSlices: 1}], }, src, dst); } return { textureType: asset.assetType, shape: asset.shape, sliceIndex: asset.sliceIndex, numSlices: asset.numSlices, numChannels: asset.numChannels, payload: payload }; } function prepareArrayPayload(asset) { return { textureType: asset.assetType, payload: asset.asset, shape: asset.shape, numChannels: asset.numChannels }; } function prepareSparseGridPayload(asset) { let result = { textureType: 'sparse_grid', blockIndicesPayload: prepareTexturePayload(asset.blockIndicesAsset), featuresPayload: prepareTexturePayload(asset.featuresAsset), }; if (asset.separateRgbAndDensity) { result.rgbPayload = prepareTexturePayload(asset.rgbAsset); result.densityPayload = prepareTexturePayload(asset.densityAsset); } else { result.rgbPayload = prepareTexturePayload(asset.rgbAndDensityAsset); result.densityPayload = prepareSparseGridDensityPayload(asset); } return result; } function prepareSparseGridGenericPayload(asset) { if (asset.mergeSlices) { return prepareSparseGridGenericMergedPayload(asset); } else { return prepareSparseGridGenericSlicesPayload(asset); } } function prepareSparseGridGenericMergedPayload(asset) { let textureType = asset.assetType.replace(/_slices$/, ''); if (textureType.includes('rgb_and_density')) { textureType = textureType.replace(/rgb_and_density$/, 'rgb'); } let srcOpt = { 2: GridTextureSource.LA_FROM_LUMINANCE_ALPHA, 3: GridTextureSource.RGB_FROM_RGB, 4: GridTextureSource.RGB_FROM_RGBA }; let dstOpt = { 2: GridTextureDestination.LA_IN_LUMINANCE_ALPHA, 3: GridTextureDestination.RGB_IN_RGB, 4: GridTextureDestination.RGB_IN_RGB }; let payload = mergeSlices( asset, srcOpt[asset.numChannels], dstOpt[asset.numChannels], ); return { textureType: textureType, shape: asset.shape, numChannels: asset.numChannels, payload: payload, }; } function prepareSparseGridGenericSlicesPayload(asset) { let textureType = asset.assetType.replace(/_slices$/, ''); if (textureType.includes('rgb_and_density')) { textureType = textureType.replace(/rgb_and_density$/, 'rgb'); } return { textureType: textureType, shape: asset.shape, numChannels: asset.numChannels, numSlices: asset.numSlices, slicePayloads: asset.sliceAssets.map(prepareTexturePayload), }; } function prepareSparseGridGenericSlicePayload(asset) { // This payload corresponds to a *slice* of a texture. let assetType = asset.assetType; let payload = null; if ((assetType == 'sparse_grid_rgb_slice' && asset.numChannels == 3) || (assetType == 'sparse_grid_density_slice' && asset.numChannels == 2) || (assetType == 'sparse_grid_features_slice' && asset.numChannels == 3)) { payload = asset.asset; } else { let srcOpts = { 2: GridTextureSource.LA_FROM_LUMINANCE_ALPHA, 3: GridTextureSource.RGB_FROM_RGB, 4: GridTextureSource.RGB_FROM_RGBA }; let dstOpts = { 2: GridTextureDestination.LA_IN_LUMINANCE_ALPHA, 3: GridTextureDestination.RGB_IN_RGB, 4: GridTextureDestination.RGB_IN_RGB }; payload = mergeSlices( { shape: asset.shape, numSlices: 1, sliceAssets: [{...asset, sliceIndex: 0, numSlices: 1}], }, srcOpts[asset.numChannels], dstOpts[asset.numChannels]); } // Only the rgb part of rgb_and_density is extracted. let textureType = assetType; if (textureType.includes('rgb_and_density')) { textureType = textureType.replace(/_rgb_and_density$/, 'rgb'); } // A slice requires no further processing. 
return { textureType: textureType, shape: asset.shape, numChannels: asset.numChannels, sliceIndex: asset.sliceIndex, numSlices: asset.numSlices, payload: payload, }; } /** * Populates the sparse grid's density texture using the alpha channel from two * data sources. This is only necessary if separateRgbAndDensity is false. */ function prepareSparseGridDensityPayload(asset) { return { textureType: 'sparse_grid_density', shape: asset.rgbAndDensityAsset.shape, numChannels: 2, payload: mergeSparseGridDensity(asset), }; } const gPrepareTexturePayloadRegistry = { 'scene': prepareScenePayload, // triplane 'triplane': prepareTriplanePayload, // distance grids 'distance_grids': prepareOccupancyGridsPayload, 'distance_grid_slices': prepareOccupancyGridPayload, 'distance_grid_slice': prepareOccupancyGridSlicePayload, // occupancy grids 'occupancy_grids': prepareOccupancyGridsPayload, 'occupancy_grid_slices': prepareOccupancyGridPayload, 'occupancy_grid_slice': prepareOccupancyGridSlicePayload, // sparse grid 'sparse_grid': prepareSparseGridPayload, 'sparse_grid_block_indices': prepareArrayPayload, 'sparse_grid_rgb_and_density_slices': prepareSparseGridGenericPayload, 'sparse_grid_rgb_and_density_slice': prepareSparseGridGenericSlicePayload, 'sparse_grid_rgb_slices': prepareSparseGridGenericPayload, 'sparse_grid_rgb_slice': prepareSparseGridGenericSlicePayload, 'sparse_grid_density_slices': prepareSparseGridGenericPayload, 'sparse_grid_density_slice': prepareSparseGridGenericSlicePayload, 'sparse_grid_features_slices': prepareSparseGridGenericPayload, 'sparse_grid_features_slice': prepareSparseGridGenericSlicePayload, }; function prepareTexturePayload(asset) { let loadFn = gPrepareTexturePayloadRegistry[asset.assetType]; if (loadFn == undefined) { console.error( `Failed to find loadFn for assetType ${asset.assetType}`, asset); } return loadFn(asset); } // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Populate WebGL textures with assets. */ /** * Populates all textures in a scene. */ function populateScene(texture, payload) { let promises = [ populateTexture(texture.occupancyGridsTexture, payload.occupancyGridsPayload), populateTexture(texture.distanceGridsTexture, payload.distanceGridsPayload), populateTexture(texture.triplaneTexture, payload.triplanePayload), populateTexture(texture.sparseGridTexture, payload.sparseGridPayload), ]; return Promise.all(promises); } /** * Populates all occupancy grid textures. */ function populateOccupancyGridsTexture(texture, payload) { console.assert( texture.gridTextures.length == payload.gridPayloads.length, texture, payload, ); let numGridTextures = texture.gridTextures.length; let promises = range(numGridTextures).map((i) => { return populateTexture(texture.gridTextures[i], payload.gridPayloads[i]); }); return Promise.all(promises); } /** * Populates all textures related to the sparse grid. 
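 *
 * A hypothetical usage sketch; `sparseGridAsset` stands for an asset with
 * assetType 'sparse_grid' fetched elsewhere in this viewer:
 *
 * @example
 *   let payload = prepareTexturePayload(sparseGridAsset);
 *   await populateSparseGridTexture(texture.sparseGridTexture, payload);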
*/
function populateSparseGridTexture(texture, payload) {
  let promises = [
    populateTexture(texture.blockIndicesTexture, payload.blockIndicesPayload),
    populateTexture(texture.rgbTexture, payload.rgbPayload),
    populateTexture(texture.densityTexture, payload.densityPayload),
    populateTexture(texture.featuresTexture, payload.featuresPayload),
  ];
  return Promise.all(promises);
}

/**
 * Populates all triplane textures.
 */
function populateTriplaneTexture(texture, payload) {
  return Promise.all([
    populateTexture(texture.rgbTexture, payload.rgbPayload),
    populateTexture(texture.densityTexture, payload.densityPayload),
    populateTexture(texture.featuresTexture, payload.featuresPayload),
  ]);
}

/**
 * Populates a single monolithic texture.
 */
async function populateArrayTexture(texture, payload) {
  if (payload.payload != null) {
    texture.texture.image.data = await payload.payload;
    texture.texture.needsUpdate = true;
  } else {
    throw new Error(
        `Unclear how to ingest payload for ${texture.textureType}`);
  }
}

/**
 * Populates a texture with or without slices.
 */
function populateArrayTextureWithWebGL(texture, payload) {
  if (payload.payload != null) {
    return populateArrayTextureMergedWithWebGL(texture, payload);
  } else if (payload.slicePayloads != null) {
    let promises = payload.slicePayloads.map(
        (slicePayload) =>
            populateArrayTextureSliceWithWebGL(texture, slicePayload));
    return Promise.all(promises);
  } else {
    throw new Error(
        `Unclear how to ingest payload for ${texture.textureType}`);
  }
}

/**
 * Populates a slice of a target texture using the WebGL API.
 */
async function populateArrayTextureSliceWithWebGL(texture, payload) {
  let gl = gRenderer.getContext();
  const volumeWidth = payload.shape[0];
  const volumeHeight = payload.shape[1];
  const sliceDepth = payload.shape[2];
  const sliceIndex = payload.sliceIndex;

  let threeFormat = texture.texture.format;
  let {glFormat, glInternalFormat, numChannels} =
      threeFormatToOpenGLFormat(gl, threeFormat);

  // Wait for data to be ready.
  let srcData = await payload.payload;

  // Set target texture as OpenGL's current texture.
  const textureProperties = gRenderer['properties'].get(texture.texture);
  let newTexture = textureProperties['__webglTexture'];
  console.assert(newTexture != null, texture);

  // Both 3D textures and 2D texture arrays use gl.texSubImage3D, but different
  // context arguments are required.
  let glTextureBinding, glTextureType;
  if (texture.texture instanceof THREE.DataTexture3D) {
    glTextureBinding = gl.TEXTURE_BINDING_3D;
    glTextureType = gl.TEXTURE_3D;
  } else if (texture.texture instanceof THREE.DataTexture2DArray) {
    glTextureBinding = gl.TEXTURE_BINDING_2D_ARRAY;
    glTextureType = gl.TEXTURE_2D_ARRAY;
  }
  let oldTexture = gl.getParameter(glTextureBinding);
  gl.bindTexture(glTextureType, newTexture);

  let start = performance.mark(`${texture.textureType}-start`);
  gl.texSubImage3D(
      glTextureType,            // target
      0,                        // level
      0,                        // xoffset
      0,                        // yoffset
      sliceIndex * sliceDepth,  // zoffset
      volumeWidth,              // width
      volumeHeight,             // height
      sliceDepth,               // depth
      glFormat,                 // format
      gl.UNSIGNED_BYTE,         // type
      srcData,                  // srcData
      0,                        // srcOffset
  );
  let end = performance.mark(`${texture.textureType}-end`);
  performance.measure(
      `${texture.textureType}-duration`,
      `${texture.textureType}-start`,
      `${texture.textureType}-end`,
  );
  gl.bindTexture(glTextureType, oldTexture);
}

async function populateArrayTextureMergedWithWebGL(texture, payload) {
  return populateArrayTextureSliceWithWebGL(
      texture, {...payload, sliceIndex: 0, numSlices: 1});
}

/**
 * Converts THREE.js's texture format to WebGL's.
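 *
 * A minimal sketch of the mapping, assuming a WebGL2 context `gl`:
 *
 * @example
 *   let {glFormat, glInternalFormat, numChannels} =
 *       threeFormatToOpenGLFormat(gl, THREE.RedFormat);
 *   // glFormat == gl.RED, glInternalFormat == gl.R8, numChannels == 1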
*/
function threeFormatToOpenGLFormat(gl, threeFormat) {
  if (threeFormat == THREE.RGBAFormat) {
    return {
      numChannels: 4,
      glFormat: gl.RGBA,
      glInternalFormat: gl.RGBA,
    };
  } else if (threeFormat == THREE.RGBFormat) {
    return {
      numChannels: 3,
      glFormat: gl.RGB,
      glInternalFormat: gl.RGB,
    };
  } else if (threeFormat == THREE.LuminanceAlphaFormat) {
    return {
      numChannels: 2,
      glFormat: gl.LUMINANCE_ALPHA,
      glInternalFormat: gl.LUMINANCE_ALPHA
    };
  } else if (threeFormat == THREE.RedFormat) {
    return {
      numChannels: 1,
      glFormat: gl.RED,
      glInternalFormat: gl.R8,
    };
  } else {
    throw new Error(`Unrecognized three format: ${threeFormat}`);
  }
}

/**
 * Registry for functions that can be reached via populateTexture().
 */
const gPopulateTextureRegistry = {
  'scene': populateScene,
  // triplane
  'triplane': populateTriplaneTexture,
  'triplane_rgb': populateArrayTextureWithWebGL,
  'triplane_density': populateArrayTextureWithWebGL,
  'triplane_features': populateArrayTextureWithWebGL,
  // distance grids
  'distance_grids': populateOccupancyGridsTexture,
  'distance_grid': populateArrayTextureWithWebGL,
  // occupancy grids
  'occupancy_grids': populateOccupancyGridsTexture,
  'occupancy_grid': populateArrayTextureWithWebGL,
  // sparse grid
  'sparse_grid': populateSparseGridTexture,
  'sparse_grid_block_indices': populateArrayTextureWithWebGL,
  'sparse_grid_rgb': populateArrayTextureWithWebGL,
  'sparse_grid_density': populateArrayTextureWithWebGL,
  'sparse_grid_features': populateArrayTextureWithWebGL,
};

/**
 * Entry point for populating textures.
 */
function populateTexture(texture, payload) {
  let loadFn = gPopulateTextureRegistry[texture.textureType];
  if (loadFn == undefined) {
    console.error(
        `Failed to find loadFn for textureType ${texture.textureType}`,
        texture);
  }
  return loadFn(texture, payload);
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Submodel scene content and ping-pong texture buffers for
 * ray marching.
 */

/**
 * Possible states for a ray march texture buffer.
 */
const NEEDS_NEW_SUBMODEL = -1;
const LOADING = 0;
const READY = 1;

/**
 * Mapping from integers to registered submodel scene content.
 */
let gSubmodelSceneContents = {};

/**
 * Maximum number of payloads to keep in memory. Use this to limit Host
 * RAM consumption.
 */
let gSubmodelCacheSize = 10;

/**
 * Ping pong buffers. texture will be initialized to contain a structure
 * matching that of create_texture.js
 */
let gRayMarchTextureBuffers = [
  {si: 0, state: NEEDS_NEW_SUBMODEL, texture: null},
  {si: 0, state: NEEDS_NEW_SUBMODEL, texture: null},
];

/**
 * Index of the active entry in gRayMarchTextureBuffers.
 */
let gActiveRayMarchTextureBuffer = 0;

/**
 * Three.js scene instance. The same instance is used for all submodels.
 */
let gRayMarchScene = null;

/**
 * Get the global three.js scene.
 */
function getRayMarchScene() {
  if (gRayMarchScene == null) {
    throw new Error('gRayMarchScene has not been initialized yet!');
  }
  return gRayMarchScene;
}

/**
 * Get the index of the active submodel.
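 *
 * A hypothetical sketch of the ping-pong flow (see setCurrentRayMarchScene):
 *
 * @example
 *   setCurrentRayMarchScene(si);  // starts loading si into the idle buffer
 *   // ...later, once that buffer reaches READY, a second call flips it...
 *   setCurrentRayMarchScene(si);
 *   console.assert(getActiveSubmodelIndex() == si);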
*/
function getActiveSubmodelIndex() {
  return gRayMarchTextureBuffers[gActiveRayMarchTextureBuffer].si;
}

/**
 * Get the content of the active submodel.
 */
function getActiveSubmodelContent() {
  return getSubmodelContent(getActiveSubmodelIndex());
}

/**
 * Get the scale of a submodel.
 */
function getSubmodelScale(si) {
  let content = getSubmodelContent(si);
  let submodelScale = content.params['submodel_scale'];
  return submodelScale;
}

/**
 * Get the scale factor of a submodel.
 */
function getSubmodelScaleFactor(si) {
  let content = getSubmodelContent(si);
  let submodelScale = getSubmodelScale(si);
  let submodelResolution = Math.cbrt(content.params['num_submodels']);
  let submodelScaleFactor = submodelScale / submodelResolution;
  return submodelScaleFactor;
}

/**
 * Get the content for a submodel.
 */
function getSubmodelContent(si) {
  return gSubmodelSceneContents[si];
}

/**
 * Register content for a submodel.
 */
function registerSubmodelContent(i, content) {
  gSubmodelSceneContents[i] = content;
}

/**
 * Get global Deferred MLP parameters.
 */
function getDeferredMlp() {
  console.assert(gDeferredMlp != null);
  return gDeferredMlp;
}

/**
 * Set the global Deferred MLP parameters.
 */
function registerDeferredMlp(deferredMlp) {
  validateDeferredMlp(deferredMlp);
  gDeferredMlp = deferredMlp;
}

/**
 * Get size of allocated texture assets.
 */
function getCurrentTextureUsageInBytes() {
  let numBytes = 0;
  for (let rmtb of gRayMarchTextureBuffers) {
    numBytes += getTextureSizeInBytes(rmtb.texture);
  }
  return numBytes;
}

/**
 * Attempt to set the active submodel.
 *
 * This operation is best-effort: if the requested submodel's textures are
 * ready, it will switch to them. If they are not, it will start the process
 * of preparing them. Call getActiveSubmodelIndex() after this function to
 * determine if this call succeeded or not.
 */
function setCurrentRayMarchScene(si) {
  let activeBufferIdx = gActiveRayMarchTextureBuffer;
  let activeBuffer = gRayMarchTextureBuffers[activeBufferIdx];
  let otherBufferIdx = (activeBufferIdx + 1) % 2;
  let otherBuffer = gRayMarchTextureBuffers[otherBufferIdx];

  if (getSubmodelContent(si) == null) {
    // Requested submodel doesn't exist. Don't attempt to load it.
    return Promise.resolve();
  }

  // Update for LRU cache.
  getSubmodelContent(si).lastTouched = Date.now();

  if (si == activeBuffer.si && activeBuffer.state >= LOADING) {
    // Wait for this buffer to finish loading.
    return Promise.resolve();
  }

  if (si == otherBuffer.si && otherBuffer.state == READY) {
    // Switch to other buffer.
    console.log(`Switching to buffer ${otherBufferIdx} for submodel #${si}`);
    let sceneContent = getSubmodelContent(si);
    setTextureUniforms(sceneContent.params, otherBuffer.texture);
    gActiveRayMarchTextureBuffer = otherBufferIdx;
    return Promise.resolve();
  }

  if (otherBuffer.state >= LOADING && otherBuffer.state < READY) {
    // The other buffer is busy loading. Don't try to claim it.
    return Promise.resolve();
  }

  // Claim this buffer and start loading it.
  console.log(
      `Preparing texture buffer #${otherBufferIdx} for submodel #${si}`);
  otherBuffer.si = si;
  otherBuffer.state = LOADING;
  return Promise.resolve()
      .then(() => {
        // Prepare texture buffers for use.
        reinitializeSparseGridTextures(otherBuffer);

        // Fetch assets now if they haven't been already.
        let content = getSubmodelContent(otherBuffer.si);
        if (content.payload == null) {
          console.log(`Fetching assets for submodel #${otherBuffer.si}`);
          let asset = fetchAsset(content.spec, content.router);
          let payload = prepareTexturePayload(asset);
          content.payload = payload;
        }

        // Populate texture with assets.
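        // (populateTexture() dispatches on texture.textureType through
        // gPopulateTextureRegistry, mirroring prepareTexturePayload() above.)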
console.log(`Populating textures for submodel #${ otherBuffer.si} into buffer #${otherBufferIdx}`); return populateTexture(otherBuffer.texture, content.payload); }).then(() => { otherBuffer.state = READY; console.log(`Submodel #${otherBuffer.si} is ready for rendering`); hideLoading(); }); } /** * Limit the number of submodel payloads in Host RAM. */ function garbageCollectSubmodelPayloads() { // Draw up a list of candidate submodels to delete. Anything that has a // payload is a candidate. let candidates = []; for (let si of Object.keys(gSubmodelSceneContents)) { let content = getSubmodelContent(si); if (content.payload == null) { continue; } candidates.push({ lastTouched: content.lastTouched || 0, si: si, }); } // Sort submodel idxs by last touched, oldest first. let oldestFirst = (a, b) => { return a.lastTouched - b.lastTouched; }; candidates.sort(oldestFirst); // Delete payload field from old submodels. for (let i = 0; i < candidates.length - gSubmodelCacheSize; ++i) { let si = candidates[i].si; console.log(`Deleting payload for submodel #${si}`); getSubmodelContent(si).payload = null; } } /** * Initialize scene content. This is a lightweight operation. */ function initializeSceneContent(sceneParams, router) { return { spec: createSceneSpec(sceneParams), params: sceneParams, router: router, payload: null, }; } /** * Re-initializes texture buffers for sparse grid. These textures are not * reusable yet, as their shape can vary between submodels. */ function reinitializeSparseGridTextures(rmtb) { let texture = rmtb.texture.sparseGridTexture; // Dispose of existing textures. texture.blockIndicesTexture.texture.dispose(); texture.rgbTexture.texture.dispose(); texture.densityTexture.texture.dispose(); texture.featuresTexture.texture.dispose(); // Create new textures. let sparseGridSpec = getSubmodelContent(rmtb.si).spec.sparseGridSpec; rmtb.texture.sparseGridTexture = createEmptyTexture(sparseGridSpec); } /** * Initializes ping-pong texture buffers. */ async function initializePingPongBuffers(si) { // Instantiate the three.js scene without textures. let sceneContent = getSubmodelContent(si); gRayMarchScene = await initializeRayMarchScene(si, sceneContent); // Instantiate textures for the ping pong buffers. for (let rmtb of gRayMarchTextureBuffers) { rmtb.texture = createEmptyTexture(sceneContent.spec); } // Assign texture uniforms from the first buffer to the scene. setTextureUniforms(sceneContent.params, gRayMarchTextureBuffers[0].texture); gActiveRayMarchTextureBuffer = 0; } async function initializeDeferredMlp(si) { // Instantiate the three.js scene without textures. let sceneContent = getSubmodelContent(si); let sceneParams = sceneContent.params; if (sceneParams['export_store_deferred_mlp_separately']) { let url = sceneContent.router.translate('deferred_mlp.json'); return loadJSONFile(url).then(registerDeferredMlp); } // DeferredMLP is stored in sceneParams. return registerDeferredMlp(sceneParams['deferred_mlp']); } /** * Assign sceneTexture's texture assets to the global ray march scene's * uniforms. 
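 *
 * A minimal usage sketch, mirroring what setCurrentRayMarchScene() does when
 * it flips buffers:
 *
 * @example
 *   let content = getSubmodelContent(si);
 *   setTextureUniforms(content.params, gRayMarchTextureBuffers[0].texture);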
*/ function setTextureUniforms(sceneParams, sceneTexture) { let rayMarchUniforms = getRayMarchScene().children[0].material.uniforms; // Occupancy grids let occupancyGridTextures = sceneTexture.occupancyGridsTexture.gridTextures; let numOccupancyGrids = occupancyGridTextures.length; for (let i = 0; i < numOccupancyGrids; ++i) { let texture = occupancyGridTextures[i]; let ri = numOccupancyGrids - i - 1; rayMarchUniforms['occupancyGrid_L' + ri]['value'] = texture.texture; } // Distance grid if (sceneParams['useDistanceGrid']) { let texture = sceneTexture.distanceGridsTexture.gridTextures[0].texture; rayMarchUniforms['distanceGrid']['value'] = texture; } // triplane let triplaneTexture = sceneTexture.triplaneTexture; rayMarchUniforms['planeDensity']['value'] = triplaneTexture.densityTexture.texture; rayMarchUniforms['planeRgb']['value'] = triplaneTexture.rgbTexture.texture; rayMarchUniforms['planeFeatures']['value'] = triplaneTexture.featuresTexture.texture; // sparse grid let sparseGridTexture = sceneTexture.sparseGridTexture; rayMarchUniforms['sparseGridBlockIndices']['value'] = sparseGridTexture.blockIndicesTexture.texture; rayMarchUniforms['sparseGridDensity']['value'] = sparseGridTexture.densityTexture.texture; rayMarchUniforms['sparseGridRgb']['value'] = sparseGridTexture.rgbTexture.texture; rayMarchUniforms['sparseGridFeatures']['value'] = sparseGridTexture.featuresTexture.texture; rayMarchUniforms['atlasSize']['value'] = new THREE.Vector3( sceneParams['atlas_width'], sceneParams['atlas_height'], sceneParams['atlas_depth'], ); } function getTextureSizeInBytes(sceneTexture) { let numBytes = 0.0; let getTextureSize = (texture) => { if (texture == null) { return 0; } let image = texture.texture.image; return image.height * image.width * image.depth; }; // Occupancy grids let occupancyGridTextures = sceneTexture.occupancyGridsTexture.gridTextures; let numOccupancyGrids = occupancyGridTextures.length; for (let i = 0; i < numOccupancyGrids; ++i) { let texture = occupancyGridTextures[i]; numBytes += getTextureSize(texture) * 1; } // Distance grid if (sceneTexture.distanceGridsTexture.gridTextures.length > 0) { let texture = sceneTexture.distanceGridsTexture.gridTextures[0]; numBytes += getTextureSize(texture) * 1; } // triplane let triplaneTexture = sceneTexture.triplaneTexture; numBytes += getTextureSize(triplaneTexture.rgbTexture) * 3; numBytes += getTextureSize(triplaneTexture.densityTexture) * 1; numBytes += getTextureSize(triplaneTexture.featuresTexture) * 4; // sparse grid let sparseGridTexture = sceneTexture.sparseGridTexture; numBytes += getTextureSize(sparseGridTexture.blockIndicesTexture) * 1; numBytes += getTextureSize(sparseGridTexture.rgbTexture) * 3; numBytes += getTextureSize(sparseGridTexture.densityTexture) * 1; numBytes += getTextureSize(sparseGridTexture.featuresTexture) * 4; return numBytes; } /** * Initializes global ray march scene using a reference submodel's scene * content. * * Uniforms for texture buffers are set to null. We assume that the reference * submodel content is more-or-less identical across all submodels. No reference * is made to the shape of texture assets. */ async function initializeRayMarchScene(si, sceneContent) { let sceneParams = sceneContent.params; let sceneSpec = sceneContent.spec; // Assemble shader code from header, on-the-fly generated view-dependency // functions and body. 
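  // Conceptually, the final fragment shader is laid out as:
  //   #define constants (prepended below, so they end up first)
  //   + kRayMarchFragmentShaderHeader
  //   + viewdependency.glsl (rewritten per scene)
  //   + fragment.glsl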
let fragmentShaderSource = kRayMarchFragmentShaderHeader;
  fragmentShaderSource += await loadTextFile('viewdependency.glsl');
  fragmentShaderSource += await loadTextFile('fragment.glsl');
  fragmentShaderSource =
      rewriteViewDependenceDefinitions(sceneParams, fragmentShaderSource);

  let worldspaceROpengl = new THREE.Matrix3();
  worldspaceROpengl.set(-1, 0, 0, 0, 0, 1, 0, 1, 0);
  let minPosition = new THREE.Vector3(-2.0, -2.0, -2.0);

  // Hard code these values as constants rather than uniforms --- they
  // do not change between submodels, and hardcoded constants allow
  // optimizing shader compilers to do better work.
  fragmentShaderSource = '#define kMinPosition vec3(' +
      Number(minPosition.x).toFixed(10) + ', ' +
      Number(minPosition.y).toFixed(10) + ', ' +
      Number(minPosition.z).toFixed(10) + ')\n' + fragmentShaderSource;
  fragmentShaderSource = '#define kSubmodelScale ' +
      Number(getSubmodelScale(si)).toFixed(10) + '\n' + fragmentShaderSource;
  fragmentShaderSource =
      '#define kStepMult ' + gStepMult + '\n' + fragmentShaderSource;
  fragmentShaderSource = '#define kRangeFeaturesMin ' +
      Number(sceneParams['range_features'][0]).toFixed(10) + '\n' +
      fragmentShaderSource;
  fragmentShaderSource = '#define kRangeFeaturesMax ' +
      Number(sceneParams['range_features'][1]).toFixed(10) + '\n' +
      fragmentShaderSource;
  fragmentShaderSource = '#define kRangeDensityMin ' +
      Number(sceneParams['range_density'][0]).toFixed(10) + '\n' +
      fragmentShaderSource;
  fragmentShaderSource = '#define kRangeDensityMax ' +
      Number(sceneParams['range_density'][1]).toFixed(10) + '\n' +
      fragmentShaderSource;

  // Pass uniforms to the shader. These are the variables that will be
  // manipulated over the course of rendering.
  let rayMarchUniforms = {
    // Neural network weights.
    'bias_0': {'value': null},
    'bias_1': {'value': null},
    'bias_2': {'value': null},
    'weightsZero': {'value': null},
    'weightsOne': {'value': null},
    'weightsTwo': {'value': null},

    // General rendering parameters.
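    // (displayMode takes its values from DisplayModeType; world_T_cam and
    // cam_T_clip are camera matrices supplied by the render loop.)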
'displayMode': {'value': gDisplayMode - 0},
    'minPosition': {'value': minPosition},
    'world_T_cam': {'value': new THREE.Matrix4()},
    'cam_T_clip': {'value': new THREE.Matrix4()},
    'worldspaceROpengl': {'value': worldspaceROpengl},
  };

  let occupancyUniforms = {};
  let occupancyGridSpecs = sceneSpec.occupancyGridsSpec.gridSpecs;
  let numOccupancyGrids = occupancyGridSpecs.length;
  for (let i = 0; i < numOccupancyGrids; ++i) {
    // Initialize occupancy grid shader
    let spec = occupancyGridSpecs[i];
    let ri = numOccupancyGrids - i - 1;
    fragmentShaderSource = '#define kVoxelSizeOccupancy_L' + ri + ' ' +
        Number(spec.voxelSize).toFixed(10) + '\n' + fragmentShaderSource;
    fragmentShaderSource = '#define kGridSizeOccupancy_L' + ri + ' vec3(' +
        Number(spec.shape[0]).toFixed(10) + ', ' +
        Number(spec.shape[1]).toFixed(10) + ', ' +
        Number(spec.shape[2]).toFixed(10) + ')\n' + fragmentShaderSource;

    // Initialize occupancy grid uniforms
    occupancyUniforms['occupancyGrid_L' + ri] = {'value': null};
  }
  rayMarchUniforms = extend(rayMarchUniforms, occupancyUniforms);

  if (sceneParams['useDistanceGrid']) {
    // Initialize distance grid shader
    let spec = sceneSpec.distanceGridsSpec.gridSpecs[0];
    fragmentShaderSource = '#define USE_DISTANCE_GRID\n' + fragmentShaderSource;
    fragmentShaderSource = '#define kVoxelSizeDistance ' +
        Number(spec.voxelSize).toFixed(10) + '\n' + fragmentShaderSource;
    fragmentShaderSource = '#define kGridSizeDistance vec3(' +
        Number(spec.shape[0]).toFixed(10) + ', ' +
        Number(spec.shape[1]).toFixed(10) + ', ' +
        Number(spec.shape[2]).toFixed(10) + ')\n' + fragmentShaderSource;

    // Initialize distance grid uniforms
    let distanceUniforms = {'distanceGrid': {'value': null}};
    rayMarchUniforms = extend(rayMarchUniforms, distanceUniforms);
  }

  let backgroundColor = new THREE.Color(0.5, 0.5, 0.5);
  if (sceneParams['backgroundColor']) {
    backgroundColor = new THREE.Color(sceneParams['backgroundColor']);
  }
  fragmentShaderSource = '#define kBackgroundColor vec3(' +
      Number(backgroundColor.r).toFixed(10) + ', ' +
      Number(backgroundColor.g).toFixed(10) + ', ' +
      Number(backgroundColor.b).toFixed(10) + ')\n' + fragmentShaderSource;

  if (gExposure || sceneParams['default_exposure']) {
    if (sceneParams['default_exposure']) {
      gExposure = parseFloat(sceneParams['default_exposure']);
    }
    fragmentShaderSource = '#define USE_EXPOSURE\n' + fragmentShaderSource;
    let exposureUniforms = {'exposure': {'value': gExposure}};
    rayMarchUniforms = extend(rayMarchUniforms, exposureUniforms);
  }

  const activation = sceneParams['activation'] ?
sceneParams['activation'] : 'elu'; fragmentShaderSource = '#define ACTIVATION_FN ' + activation + '\n' + fragmentShaderSource; if (sceneParams['feature_gating'] === null || sceneParams['feature_gating'] === undefined || sceneParams['feature_gating'] === true) { fragmentShaderSource = '#define USE_FEATURE_GATING\n' + fragmentShaderSource; } if (sceneParams['deferred_rendering_mode'] === 'vfr') { fragmentShaderSource = '#define USE_VFR\n' + fragmentShaderSource; } if (sceneParams['merge_features_combine_op'] === 'coarse_sum') { fragmentShaderSource = '#define USE_FEATURE_CONCAT\n' + fragmentShaderSource; } if (sceneParams['useBits']) { fragmentShaderSource = '#define USE_BITS\n' + fragmentShaderSource; } if (sceneParams['useLargerStepsWhenOccluded']) { fragmentShaderSource = '#define LARGER_STEPS_WHEN_OCCLUDED\n' + fragmentShaderSource; fragmentShaderSource = '#define kVisibilityDelay ' + Number(sceneParams['step_size_visibility_delay']).toFixed(10) + '\n' + fragmentShaderSource; } // Initialize triplane shader let triplaneGridSize = new THREE.Vector2(...sceneSpec.triplaneSpec.shape); fragmentShaderSource = '#define kTriplaneVoxelSize ' + Number(sceneParams['triplane_voxel_size']).toFixed(10) + '\n' + fragmentShaderSource; fragmentShaderSource = '#define kTriplaneGridSize vec2(' + Number(triplaneGridSize.x).toFixed(10) + ', ' + Number(triplaneGridSize.y).toFixed(10) + ')\n' + fragmentShaderSource; // Initialize triplane uniforms let triplaneUniforms = { 'planeDensity': {'value': null}, 'planeRgb': {'value': null}, 'planeFeatures': {'value': null}, }; rayMarchUniforms = extend(rayMarchUniforms, triplaneUniforms); fragmentShaderSource = '#define kDataBlockSize ' + Number(sceneParams['data_block_size']).toFixed(10) + '\n' + fragmentShaderSource; fragmentShaderSource = '#define kSparseGridVoxelSize ' + Number(sceneParams['sparse_grid_voxel_size']).toFixed(10) + '\n' + fragmentShaderSource; fragmentShaderSource = '#define kSparseGridGridSize vec3(' + Number(sceneParams['sparse_grid_resolution']).toFixed(10) + ', ' + Number(sceneParams['sparse_grid_resolution']).toFixed(10) + ', ' + Number(sceneParams['sparse_grid_resolution']).toFixed(10) + ')\n' + fragmentShaderSource; let sparseGridUniforms = { 'sparseGridBlockIndices': {'value': null}, 'sparseGridDensity': {'value': null}, 'sparseGridRgb': {'value': null}, 'sparseGridFeatures': {'value': null}, 'atlasSize': {'value': null}, }; rayMarchUniforms = extend(rayMarchUniforms, sparseGridUniforms); // Bundle uniforms, vertex and fragment shader in a material let rayMarchMaterial = new THREE.ShaderMaterial({ uniforms: rayMarchUniforms, vertexShader: kRayMarchVertexShader, fragmentShader: fragmentShaderSource, vertexColors: true, }); rayMarchMaterial.side = THREE.DoubleSide; rayMarchMaterial.depthTest = false; rayMarchMaterial.needsUpdate = true; const plane = new THREE.PlaneBufferGeometry(...gViewportDims); let mesh = new THREE.Mesh(plane, rayMarchMaterial); mesh.position.z = -100; mesh.frustumCulled = false; let scene = new THREE.Scene(); scene.add(mesh); scene.autoUpdate = false; return scene; } /** * Validates shape of DeferredMLP parameters. */ function validateDeferredMlp(deferredMlp) { const mlpName = !!deferredMlp['ResampleDense_0/kernel'] ? 'ResampleDense' : 'Dense'; // WARNING: There must be EXACTLY three ResampleDense layers in the // DeferredMLP!! 
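  // Concretely, for each layer li in {0, 1, 2} both
  //   deferredMlp[`${mlpName}_${li}/kernel`].shape and
  //   deferredMlp[`${mlpName}_${li}/bias`].shape
  // must exist; for ResampleDense kernels, dims 1..3 are a cubic grid shape
  // that must match the bias shape, as checked below.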
for (let li = 0; li < 3; li++) {
    const layerName = `${mlpName}_${li}`;
    let kernelShape = deferredMlp[`${layerName}/kernel`]['shape'];
    let biasShape = deferredMlp[`${layerName}/bias`]['shape'];
    if (mlpName === 'ResampleDense') {
      let gridSize = kernelShape[1];
      // We assume that all grid dimensions are identical
      console.assert(
          gridSize === kernelShape[2] && gridSize === kernelShape[3]);
      // We also require the grid shape and the bias shape to match.
      console.assert(
          kernelShape[0] === biasShape[0] && kernelShape[1] === biasShape[1] &&
          kernelShape[2] === biasShape[2] && kernelShape[3] === biasShape[3]);
    }
  }
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview A pool of web workers for parsing binary assets and merging
 * texture slices off the main thread.
 */

/**
 * A singleton managing a collection of web workers.
 */
class WorkerPool {
  /**
   * Initializes a WorkerPool
   */
  constructor(numWorkers, filename) {
    let that = this;
    numWorkers = numWorkers || 2;

    // Create a pool of workers.
    this.workers = [];
    for (let i = 0; i < numWorkers; ++i) {
      let worker = new Worker(filename);
      worker.onmessage = (e) => {
        that.onmessage(e);
      };
      this.workers.push(worker);
    }
    this.nextworker = 0;
    this.callbacks = {};
    this.i = 0;
  }

  /**
   * Submit task to web worker.
   */
  submit(request, callback) {
    const i = this.i;
    this.callbacks[i] = callback;
    this.i += 1;

    const w = this.nextworker;
    const worker = this.workers[w];
    this.nextworker = (w + 1) % this.workers.length;
    worker.postMessage({i, request});
  }

  /**
   * Callback for responses coming back from the workers.
   */
  onmessage(e) {
    const response = e.data;
    const i = response.i;
    const callback = this.callbacks[i];
    callback(response.result);
    delete this.callbacks[i];
  }
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Global state of the web viewer.
 */

/**
 * Our framerate display.
 * @type {?Object}
 */
let gStats = null;

/**
 * If enabled, expect multiple submodels.
 */
let gUseSubmodel = false;

/**
 * Transform from world coordinates to the current submodel.
 */
let gSubmodelTransform = null;

/**
 * Deferred MLP parameters for the current submodel.
 */
let gDeferredMlp = null;

/**
 * Different display modes for debugging rendering.
 * @enum {number}
 */
const DisplayModeType = {
  /** Runs the full model with view dependence. */
  DISPLAY_NORMAL: 0,
  /** Disables the view-dependence network. */
  DISPLAY_DIFFUSE: 1,
  /** Only shows the latent features.
*/ DISPLAY_FEATURES: 2, /** Only shows the view dependent component. */ DISPLAY_VIEW_DEPENDENT: 3, /** Only shows the coarse block grid. */ DISPLAY_COARSE_GRID: 4, }; /** @type {!DisplayModeType} */ let gDisplayMode = DisplayModeType.DISPLAY_NORMAL; /** * If true we evaluate run-time performance by re-rendering test viewpoints. * @type {boolean} */ let gBenchmark = false; /** * For benchmarking with vsync on: render this many redundant images per frame. * @type {number} */ let gFrameMult = 1; /** * A web worker for parsing binary assets in a separate thread. * @type {*} */ let gLoadAssetsWorker = new WorkerPool(4, "loadpng.worker.js"); /** * A web worker for merging slices together. * @type {*} */ let gCopySliceWorker = new WorkerPool(4, "copyslices.worker.js"); /** * The vertex shader for rendering a baked MERF scene with ray marching. * @const {string} */ const kRayMarchVertexShader = ` varying vec3 vOrigin; varying vec3 vDirection; uniform mat4 world_T_cam; uniform mat4 cam_T_clip; void main() { vec4 posClip = projectionMatrix * modelViewMatrix * vec4(position, 1.0); gl_Position = posClip; posClip /= posClip.w; vec4 originCam = vec4(0.0, 0.0, 0.0, 1.0); vec4 nearPointCam = cam_T_clip * vec4(posClip.x, posClip.y, -1.0, 1.0); nearPointCam /= -nearPointCam.z; vec4 originWorld = world_T_cam * originCam; vec4 nearPointWorld = world_T_cam * nearPointCam; vOrigin = originWorld.xyz / originWorld.w; vDirection = nearPointWorld.xyz / nearPointWorld.w - vOrigin; } `; /** * We build the ray marching shader programmatically, this string contains the * header for the shader. * @const {string} */ const kRayMarchFragmentShaderHeader = ` precision highp float; varying vec3 vOrigin; varying vec3 vDirection; uniform int displayMode; uniform mat3 worldspaceROpengl; uniform float nearPlane; #ifdef USE_DISTANCE_GRID uniform highp sampler3D distanceGrid; uniform highp sampler3D occupancyGrid_L0; #else uniform highp sampler3D occupancyGrid_L0; uniform highp sampler3D occupancyGrid_L1; uniform highp sampler3D occupancyGrid_L2; #ifndef USE_BITS uniform highp sampler3D occupancyGrid_L3; uniform highp sampler3D occupancyGrid_L4; #endif #endif uniform vec4 bias_0[NUM_CHANNELS_ONE/4]; uniform vec4 bias_1[NUM_CHANNELS_TWO/4]; uniform vec4 bias_2[NUM_CHANNELS_THREE/4]; uniform highp sampler2D weightsZero; uniform highp sampler2D weightsOne; uniform highp sampler2D weightsTwo; #ifdef USE_EXPOSURE uniform float exposure; #endif uniform vec3 atlasSize; uniform highp sampler3D sparseGridBlockIndices; uniform highp sampler3D sparseGridDensity; uniform highp sampler3D sparseGridRgb; uniform highp sampler3D sparseGridFeatures; // need to use texture arrays, otherwise we exceed max texture unit limit uniform highp sampler2DArray planeDensity; uniform highp sampler2DArray planeRgb; uniform highp sampler2DArray planeFeatures; `; /** * The THREE.js renderer object we use. * @type {?THREE.WebGLRenderer} */ let gRenderer = null; /** * The number of submodels */ let gSubmodelCount = 1; /** * The perspective camera we use to view the scene. * @type {?THREE.PerspectiveCamera} */ let gCamera = null; let gViewportDims = [640, 480]; // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Miscellaneous utilities for the webviewer.
 */

/**
 * Number of textures being fetched.
 * @type {number}
 */
let gNumTextures = 0;

/**
 * Number of already loaded textures.
 * @type {number}
 */
let gLoadedTextures = 0;

/**
 * Allows forcing a specific submodel for debugging.
 */
let gSubmodelForceIndex = -1;

/**
 * Extends a dictionary.
 * @param {!object} obj Dictionary to extend
 * @param {!object} src Dictionary to be written into obj
 * @return {!object} Extended dictionary
 */
function extend(obj, src) {
  for (let key in src) {
    if (src.hasOwnProperty(key)) obj[key] = src[key];
  }
  return obj;
}

/**
 * Reports an error to the user by populating the error div with text.
 * @param {string} text
 */
function error(text) {
  const e = document.getElementById('error');
  e.textContent = text;
  e.style.display = 'block';
}

/**
 * Creates a DOM element that belongs to the given CSS class.
 * @param {string} what
 * @param {string} className
 * @return {!HTMLElement}
 */
function create(what, className) {
  const e = /** @type {!HTMLElement} */ (document.createElement(what));
  if (className) {
    e.className = className;
  }
  return e;
}

/**
 * Formats the integer i as a string with "min" leading zeroes.
 * @param {number} i
 * @param {number} min
 * @return {string}
 */
function digits(i, min) {
  const s = '' + i;
  if (s.length >= min) {
    return s;
  } else {
    return ('00000' + s).substr(-min);
  }
}

function setupViewport(width, height) {
  gViewportDims = [width, height];
}

/**
 * Equivalent to range(n) in Python.
 */
function range(n) {
  return [...Array(n).keys()];
}

/**
 * Product of a set of numbers.
 * @param {array} xs
 * @return {number}
 */
function product(xs) {
  let result = 1;
  for (let x of xs) {
    result *= x;
  }
  return result;
}

/**
 * Sum of a set of numbers.
 */
function sum(xs) {
  let result = 0;
  for (let x of xs) {
    result += x;
  }
  return result;
}

/**
 * Resizes a DOM element to the given dimensions.
 * @param {!Element} element
 * @param {number} width
 * @param {number} height
 */
function setDims(element, width, height) {
  element.style.width = width.toFixed(2) + 'px';
  element.style.height = height.toFixed(2) + 'px';
}

/**
 * Hides the loading prompt.
 */
function hideLoading() {
  let loading = document.getElementById('Loading');
  loading.style.display = 'none';
  let loadingContainer = document.getElementById('loading-container');
  loadingContainer.style.display = 'none';
}

/** Show the loading prompt */
function showLoading() {
  let loading = document.getElementById('Loading');
  loading.style.display = 'block';
  let loadingContainer = document.getElementById('loading-container');
  loadingContainer.style.display = 'block';
}

/**
 * Returns true if the scene is still loading.
 */
function isLoading() {
  const loading = document.getElementById('Loading');
  return loading.style.display !== 'none';
}

/**
 * Executed whenever an image fetch starts, for updating the loading prompt.
 */
function onImageFetch(value) {
  gNumTextures++;
  updateLoadingProgress();
  return value;
}

/**
 * Executed whenever an image is loaded for updating the loading prompt.
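 *
 * A hypothetical sketch of how the two counters might be threaded through a
 * fetch chain (both helpers pass their argument through unchanged):
 *
 * @example
 *   let pending = onImageFetch(loadAsset(url))  // one more texture expected
 *       .then(onImageLoaded);                   // one more texture finished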
*/ function onImageLoaded(value) { gLoadedTextures++; updateLoadingProgress(); return value; } /** * Updates the loading progress HTML elements. */ function updateLoadingProgress() { let imageProgress = document.getElementById('image-progress'); const numTexturesString = gNumTextures > 0 ? gNumTextures : '?'; imageProgress.innerHTML = 'Loading images: ' + gLoadedTextures + '/' + numTexturesString; } /** * Checks whether the WebGL context is valid and the underlying hardware is * powerful enough. Otherwise displays a warning. * @return {boolean} */ function isRendererUnsupported() { let loading = document.getElementById('Loading'); let gl = document.getElementsByTagName('canvas')[0].getContext('webgl2'); if (!gl) { loading.innerHTML = 'Error: WebGL2 context not found. Is your machine' + ' equipped with a discrete GPU?'; return true; } let debugInfo = gl.getExtension('WEBGL_debug_renderer_info'); if (!debugInfo) { loading.innerHTML = 'Error: Could not fetch renderer info. Is your' + ' machine equipped with a discrete GPU?'; return true; } return false; } /** * Returns a promise that fires within a specified amount of time. Can be used * in an asynchronous function for sleeping. * @param {number} milliseconds Amount of time to sleep * @return {!Promise} */ function sleep(milliseconds) { return new Promise((resolve) => setTimeout(resolve, milliseconds)); } /** * Given a submodel index, returns path to its scene_params.json file. * * @param {number} submodelId Submodel index. * @param {string} assetName Optional filename. * @return {string} Path to submodel assets. */ function submodelAssetPath(submodelId, assetName) { let prefix = ''; if (gUseSubmodel) { const smIdx = String(submodelId).padStart(3, '0'); prefix = `../sm_${smIdx}`; if (assetName == undefined) { return prefix; } return `${prefix}/${assetName}`; } return assetName; } /** * Determines appropriate submodel index for a position in world coordinates. */ function positionToSubmodel(xyz, sceneParams) { if (gUseSubmodel == false) { return 0; } if (gSubmodelForceIndex >= 0) { return gSubmodelForceIndex; } let fixed_xyz = new THREE.Vector3(-xyz.x, xyz.z, xyz.y); let voxel_resolution = 2 / sceneParams['submodel_voxel_size']; let x_grid = fixed_xyz.addScalar(1.0).divideScalar(2.0); x_grid = x_grid.multiplyScalar(voxel_resolution); let x_floor = x_grid.floor().clampScalar(0, voxel_resolution - 1); const linear_index = (x_floor.x * voxel_resolution + x_floor.y) * voxel_resolution + x_floor.z; return sceneParams['sm_to_params'][linear_index]; } /** * Computes center of submodel in world coordinates. 
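 *
 * Submodels are laid out on a regular grid with
 * voxel_resolution = 2 / submodel_voxel_size cells per axis, so the returned
 * center always lies inside [-1, 1]^3 in world coordinates.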
*/
function submodelCenter(submodelId, sceneParams) {
  if (gUseSubmodel == false) {
    return new THREE.Vector3(0.0, 0.0, 0.0);
  }

  /* The submodels are ordered through z, y then x from negative to positive */
  let submodelVoxelSize = sceneParams['submodel_voxel_size'];
  let voxel_resolution = 2 / submodelVoxelSize;

  let submodelIndex = sceneParams['params_to_sm'][submodelId];
  let z_index = submodelIndex % voxel_resolution;
  let y_index =
      ((submodelIndex - z_index) / voxel_resolution) % voxel_resolution;
  let x_index =
      ((submodelIndex - z_index - y_index * voxel_resolution) /
       voxel_resolution / voxel_resolution);

  /* reorder for coordinate systems */
  x_index = voxel_resolution - 1 - x_index;
  [y_index, z_index] = [z_index, y_index];

  return new THREE.Vector3(
      (x_index + 0.5) * submodelVoxelSize - 1.0,
      (y_index + 0.5) * submodelVoxelSize - 1.0,
      (z_index + 0.5) * submodelVoxelSize - 1.0);
}

/**
 * Creates transform matrix from world coordinates to submodel coordinates.
 */
function submodelTransform(submodelId, sceneParams) {
  const submodel_position = submodelCenter(submodelId, sceneParams);
  const submodel_scale = sceneParams['submodel_scale'];

  let submodel_scale_matrix = new THREE.Matrix4();
  submodel_scale_matrix.makeScale(
      submodel_scale, submodel_scale, submodel_scale);
  let submodel_translate_matrix = new THREE.Matrix4();
  submodel_translate_matrix.makeTranslation(
      -submodel_position.x, -submodel_position.y, -submodel_position.z);
  let submodel_matrix = new THREE.Matrix4();
  submodel_matrix.multiplyMatrices(
      submodel_scale_matrix, submodel_translate_matrix);

  return submodel_matrix;
}

/**
 * Safe fetching. Some servers restrict the number of requests and
 * respond with status code 429 ("Too Many Requests") when a threshold
 * is exceeded. When we encounter a 429 we retry after a short waiting period.
 * @param {!object} fetchFn Function that fetches the file.
 * @return {!Promise} Returns fetchFn's response.
 */
async function fetchAndRetryIfNecessary(fetchFn) {
  const response = await fetchFn();
  if (response.status === 429) {
    await sleep(500);
    return fetchAndRetryIfNecessary(fetchFn);
  }
  return response;
}

/**
 * Loads binary asset from rgbaUrl and decodes it to an Uint8Array.
 * @param {string} rgbaUrl The URL of the asset image.
 * @return {!Promise}
 */
function loadAsset(rgbaUrl) {
  const result = new Promise((resolve) => {
    gLoadAssetsWorker.submit({url: rgbaUrl}, resolve);
  });
  return result;
}

/**
 * Merge slices into a single array with a web worker.
 */
function mergeSlices(asset, src, dst) {
  // Wait for all assets to arrive.
  let promises = asset.sliceAssets.map((sliceAsset) => sliceAsset.asset);

  // Nearly all calls to this function merge a list of assets sliced along the
  // depth dimension. The only exception to this is sparse grid density, which
  // must merge from >1 sources.
  let result = Promise.all(promises).then((rawAssets) => {
    // Replace promises with their actual values
    let rawSliceAssets = range(rawAssets.length).map((i) => {
      return {
        ...asset.sliceAssets[i],
        asset: rawAssets[i],
      };
    });

    // Forward request to worker.
    let rawAsset = {...asset, sliceAssets: rawSliceAssets};
    let request = {asset: rawAsset, src: src, dst: dst, fn: 'mergeSlices'};
    return new Promise((resolve) => {
      gCopySliceWorker.submit(request, resolve);
    });
  });
  return result;
}

/**
 * Merge slices of sparse grid density into a single array.
 */
function mergeSparseGridDensity(asset) {
  // Wait for all assets to arrive.
let getAssetPromises = (assetSlices) => Promise.all(
      assetSlices.sliceAssets.map((sliceAsset) => sliceAsset.asset));
  let rgbAndDensityPromise = getAssetPromises(asset.rgbAndDensityAsset);
  let featuresPromise = getAssetPromises(asset.featuresAsset);
  let promises = [rgbAndDensityPromise, featuresPromise];

  // This is the exception mentioned in mergeSlices(): density is assembled
  // from two slice streams rather than one.
  let result = Promise.all(promises).then((result) => {
    let rawRgbAndDensitySliceAssets = result[0];
    let rawFeaturesSliceAssets = result[1];

    // Replace promises with their actual values
    let reassembleSliceAssets = (originalSliceAsset, rawSliceAssets) => {
      let numSliceAssets = rawSliceAssets.length;
      let sliceAssets = range(numSliceAssets).map((i) => {
        return {...originalSliceAsset.sliceAssets[i], asset: rawSliceAssets[i]};
      });
      return {...originalSliceAsset, sliceAssets: sliceAssets};
    };
    let rawRgbAndDensityAsset = reassembleSliceAssets(
        asset.rgbAndDensityAsset, rawRgbAndDensitySliceAssets);
    let rawFeaturesAsset =
        reassembleSliceAssets(asset.featuresAsset, rawFeaturesSliceAssets);

    // Forward request to worker.
    let rawAsset = {
      assetType: asset.assetType,
      rgbAndDensityAsset: rawRgbAndDensityAsset,
      featuresAsset: rawFeaturesAsset,
    };
    let request = {
      asset: rawAsset,
      fn: 'mergeSparseGridDensity',
    };
    return new Promise((resolve) => {
      gCopySliceWorker.submit(request, resolve);
    });
  });
  return result;
}

/**
 * Get a field's value or return a default value.
 */
function getFieldOrDefault(obj, field, default_) {
  let result = obj[field];
  if (result == undefined) {
    return default_;
  }
  return result;
}

/**
 * Loads a text file using a signed URL.
 * @param {string} dirUrl The path of the file to be loaded
 * @return {!Promise}
 */
async function loadTextFile(dirUrl) {
  try {
    // Clean up relative path segments.
    const cleanedDirUrl = cleanPath(dirUrl);

    // Ask the server for a signed URL.
    const response = await fetch('http://localhost:3001/generate-signed-url', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ dirUrl: cleanedDirUrl }),  // Do not encode here.
    });

    if (!response.ok) {
      throw new Error(`Failed to generate signed URL: ${response.status}`);
    }

    const { url } = await response.json();
    console.log('Attempting to load text file from:', url);

    // Use the signed URL to download the file.
    const fileResponse = await fetch(url);
    if (!fileResponse.ok) {
      throw new Error(`Failed to download file: ${fileResponse.status}`);
    }

    const text = await fileResponse.text();
    console.log('Text file contents loaded from', url, ':', text);
    return text;
  } catch (error) {
    console.error(`Failed to fetch text file from ${dirUrl}:`, error);
    throw error;
  }
}

/**
 * Loads and parses a JSON file using a signed URL.
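 *
 * A minimal usage sketch (the path is resolved through the same signed-URL
 * endpoint as loadTextFile):
 *
 * @example
 *   let deferredMlp = await loadJSONFile(router.translate('deferred_mlp.json'));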
* @param {string} dirUrl The path of the file to be loaded
 * @return {!Promise} The parsed JSON file
 */
async function loadJSONFile(dirUrl) {
  try {
    // Clean up relative path segments.
    const cleanedDirUrl = cleanPath(dirUrl);

    // Ask the server for a signed URL.
    const response = await fetch('http://localhost:3001/generate-signed-url', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ dirUrl: cleanedDirUrl }),
    });
    console.log('Path before signed URL generation:', cleanedDirUrl);

    if (!response.ok) {
      throw new Error(`Failed to generate signed URL: ${response.status}`);
    }

    const { url } = await response.json();
    console.log('Attempting to load JSON file from:', url);

    // Use the signed URL to download the file.
    const fileResponse = await fetch(url);
    if (!fileResponse.ok) {
      throw new Error(`Failed to download JSON file: ${fileResponse.status}`);
    }

    const json = await fileResponse.json();
    console.log('JSON file contents loaded from', url, ':', json);
    return json;
  } catch (error) {
    console.error(`Failed to fetch JSON file from ${dirUrl}:`, error);
    throw error;
  }
}

/**
 * Cleans the given path by removing any `../` or `./` segments.
 * @param {string} path The path to be cleaned.
 * @return {string} The cleaned path.
 */
function cleanPath(path) {
  const parts = path.split('/');
  const stack = [];
  for (let i = 0; i < parts.length; i++) {
    if (parts[i] === '..') {
      if (stack.length) {
        stack.pop();
      }
    } else if (parts[i] !== '.' && parts[i] !== '') {
      stack.push(parts[i]);
    }
  }
  return stack.join('/');
}

class Router {
  /**
   * Constructor.
   * @param {string} dirUrl The url where scene files are stored.
   * @param {?object} filenameToLink Dictionary that maps internal file names
   *     to download links.
   */
  constructor(dirUrl, filenameToLink) {
    this.dirUrl = dirUrl;
    this.filenameToLink = filenameToLink;
  }

  /**
   * Maps a virtual filename to a URL.
   * @param {string} filename Internal filename.
   * @return {string} Download URL.
   */
  translate(filename) {
    if (this.filenameToLink != null) {
      // Lookup download URL in map.
      return this.filenameToLink[filename];
    } else {
      // Clean relative segments like `../` from the path.
      const cleanedDirUrl = this.cleanPath(this.dirUrl);
      const fullPath = `${cleanedDirUrl}/${filename}`;
      // Return the resulting URL.
      return fullPath;
    }
  }

  /**
   * Cleans the given path by removing any `../` or `./` segments.
   * @param {string} path The path to be cleaned.
   * @return {string} The cleaned path.
   */
  cleanPath(path) {
    const parts = path.split('/');
    const stack = [];
    for (let i = 0; i < parts.length; i++) {
      if (parts[i] === '..') {
        if (stack.length) {
          stack.pop();
        }
      } else if (parts[i] !== '.' && parts[i] !== '') {
        stack.push(parts[i]);
      }
    }
    return stack.join('/');
  }
}

/** Format of a texture */
const Format = {
  RED: {numChannels: 1},
  LUMINANCE_ALPHA: {numChannels: 2},
  RGB: {numChannels: 3},
  RGBA: {numChannels: 4},
};

/** Where to copy inputs from */
const GridTextureSource = {
  RGBA_FROM_RGBA: {format: Format.RGBA, channels: [0, 1, 2, 3]},
  RGB_FROM_RGBA: {format: Format.RGBA, channels: [0, 1, 2]},
  RGB_FROM_RGB: {format: Format.RGB, channels: [0, 1, 2]},
  ALPHA_FROM_RGBA: {format: Format.RGBA, channels: [3]},
  RED_FROM_RED: {format: Format.RED, channels: [0]},
  LA_FROM_LUMINANCE_ALPHA: {format: Format.LUMINANCE_ALPHA, channels: [0, 1]},
};

/** Where to copy outputs to.
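 *
 * For example, pairing GridTextureSource.ALPHA_FROM_RGBA with
 * GridTextureDestination.RED_IN_RED reads channel 3 of a 4-channel source
 * and writes it into channel 0 of a 1-channel destination.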
**/ const GridTextureDestination = { RED_IN_RED: { format: Format.RED, channels: [0]}, RGB_IN_RGB: { format: Format.RGB, channels: [0, 1, 2] }, RGBA_IN_RGBA: { format: Format.RGBA, channels: [0, 1, 2, 3] }, LA_IN_LUMINANCE_ALPHA: { format: Format.LUMINANCE_ALPHA, channels: [0, 1] }, LUMINANCE_IN_LUMINANCE_ALPHA: { format: Format.LUMINANCE_ALPHA, channels: [0] }, ALPHA_IN_LUMINANCE_ALPHA: { format: Format.LUMINANCE_ALPHA, channels: [1] }, }; // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Progressive rendering. */ /** * This is the main bookkeeping scene for progressive upsampling, as it * keeps track of multiple low-res frames, and their corresponding filter * weights. * @type {?THREE.Scene} */ let gSceneAccumulate = null; /** * A lower res orthographic camera used to kick off ray marching * with a full-screen render pass. * @type {?THREE.OrthographicCamera} */ let gLowResBlitCamera = null; /** * A higher res orthographic camera used to perform full-resolution * post-processing passes. * @type {?THREE.OrthographicCamera} */ let gHighResBlitCamera = null; /** * Keeps track of the camera transformation matrix, so we can turn off * progressive rendering when the camera moves * @type {?THREE.Matrix4} */ let gOldMatrixWorld = null; /** * Keeps track of the camera projection matrix, so we can turn off * progressive rendering when the camera zooms in or out. * @type {?THREE.Matrix4} */ let gOldProjectionMatrix = null; /** * Counts the current frame number, used for random sampling. * @type {number} */ let gFrameIndex = 0; /** * This is a half-res rendertarget used for progressive rendering. * @type {?THREE.WebGLRenderTarget} */ let gLowResTexture = null; /** * @param {!THREE.Texture} textureLowRes * @param {!THREE.Texture} textureHistory * @param {!THREE.Vector2} lowResolution * @param {!THREE.Vector2} highResolution * @return {!THREE.Material} */ function createAccumulateMaterial( textureLowRes, textureHistory, lowResolution, highResolution) { const material = new THREE.ShaderMaterial({ uniforms: { 'mapLowRes': {'value': textureLowRes}, 'mapHistory': {'value': textureHistory}, 'lowResolution': {'value': lowResolution}, 'highResolution': {'value': highResolution}, 'jitterOffset': {'value': new THREE.Vector2(0.0, 0.0)}, 'emaAlpha': {'value': 0.15}, }, vertexShader: accumulateVertexShader, fragmentShader: accumulateFragmentShader, }); return material; } /** * These are the ping-pong buffers used for progressive upsampling. Every frame * we read from one buffer, and write into the other. This allows us to maintain * a history of multiple low-res frames, and their corresponding filter * weights. 
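 *
 * Per frame the accumulate pass computes (see accumulateFragmentShader)
 *   history' = emaAlpha * filteredLowRes + (1 - emaAlpha) * history,
 * and the normalize pass divides rgb by the accumulated filter weight stored
 * in alpha.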
* @type {!Array} */ const gAccumulationTextures = [null, null]; /** @const {string} */ const normalizeVertexShader = ` varying vec2 vUv; void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0); } `; /** @const {string} */ const normalizeFragmentShader = ` varying vec2 vUv; uniform sampler2D map; void main() { gl_FragColor = texture2D(map, vUv); if (gl_FragColor.a > 0.0) { gl_FragColor.rgb /= gl_FragColor.a; } gl_FragColor.a = 1.0; } `; /** * @param {!THREE.Texture} texture * @return {!THREE.Material} */ function createNormalizeMaterial(texture) { const material = new THREE.ShaderMaterial({ uniforms: { 'map': {'value': texture}, }, vertexShader: normalizeVertexShader, fragmentShader: normalizeFragmentShader, }); return material; } /** * Blits a texture into the framebuffer, normalizing the result using * the alpha channel. I.e. pixel_out = pixel_in.rgba / pixel_in.a. * @type {?THREE.Scene} */ let gSceneNormalize = null; /** @const {string} */ const accumulateVertexShader = ` varying vec2 vUv; void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0); } `; /** @const {string} */ const accumulateFragmentShader = ` varying vec2 vUv; uniform vec2 lowResolution; uniform vec2 highResolution; uniform vec2 jitterOffset; uniform float emaAlpha; uniform sampler2D mapLowRes; uniform sampler2D mapHistory; float pixelFilter(vec2 pixelCenter, vec2 sampleCenter) { vec2 delta = pixelCenter - sampleCenter; float squaredNorm = dot(delta, delta); return exp(-2.29 * squaredNorm); } void main() { // First we need to compute the coordinates of the pixel centers // in the low resolution grid by compensating for the camera jitter. // Note that the offset is defined in clip space [-1,1]^2, so we need // to multiply it by 0.5 to make it valid in texture space [0,1]^2. vec2 compensatedUnitCoords = vUv - jitterOffset * 0.5; // Now compute the integer coordinates in the low resolution grid for each // adjacent texel. ivec2 lowResCoords00 = ivec2(compensatedUnitCoords * lowResolution - 0.5); ivec2 lowResCoords01 = ivec2(0, 1) + lowResCoords00; ivec2 lowResCoords10 = ivec2(1, 0) + lowResCoords00; ivec2 lowResCoords11 = ivec2(1, 1) + lowResCoords00; float mask00 = min(lowResCoords00.x, lowResCoords00.y) < 0 || lowResCoords00.x >= int(lowResolution.x) || lowResCoords00.y >= int(lowResolution.y) ? 0.0 : 1.0; float mask01 = min(lowResCoords01.x, lowResCoords01.y) < 0 || lowResCoords01.x >= int(lowResolution.x) || lowResCoords01.y >= int(lowResolution.y) ? 0.0 : 1.0; float mask10 = min(lowResCoords10.x, lowResCoords10.y) < 0 || lowResCoords10.x >= int(lowResolution.x) || lowResCoords10.y >= int(lowResolution.y) ? 0.0 : 1.0; float mask11 = min(lowResCoords11.x, lowResCoords11.y) < 0 || lowResCoords11.x >= int(lowResolution.x) || lowResCoords11.y >= int(lowResolution.y) ? 0.0 : 1.0; // We also need to keep track of the high resolution counterparts of these // coordinates, so we can compute the pixel reconstruction filter weights. 
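  // (pixelFilter() above is a Gaussian-like reconstruction kernel,
  // exp(-2.29 * |delta|^2) in pixel units, so texels more than ~1.5 pixels
  // from the output pixel center contribute almost nothing.)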
vec2 compensatedHighResCoords = highResolution * compensatedUnitCoords; vec2 highResCoords00 = highResolution * (vec2(lowResCoords00) + 0.5) / lowResolution; vec2 highResCoords01 = highResolution * (vec2(lowResCoords01) + 0.5) / lowResolution; vec2 highResCoords10 = highResolution * (vec2(lowResCoords10) + 0.5) / lowResolution; vec2 highResCoords11 = highResolution * (vec2(lowResCoords11) + 0.5) / lowResolution; vec4 lowResColor = vec4(0.0, 0.0, 0.0, 0.0); lowResColor += mask00 * vec4( texelFetch(mapLowRes,lowResCoords00, 0).rgb, 1.0) * pixelFilter(compensatedHighResCoords, highResCoords00); lowResColor += mask01 * vec4( texelFetch(mapLowRes, lowResCoords01, 0).rgb, 1.0) * pixelFilter(compensatedHighResCoords, highResCoords01); lowResColor += mask10 * vec4( texelFetch(mapLowRes, lowResCoords10, 0).rgb, 1.0) * pixelFilter(compensatedHighResCoords, highResCoords10); lowResColor += mask11 * vec4( texelFetch(mapLowRes, lowResCoords11, 0).rgb, 1.0) * pixelFilter(compensatedHighResCoords, highResCoords11); vec4 historyColor = texture2D(mapHistory, vUv); gl_FragColor = emaAlpha * lowResColor + (1.0 - emaAlpha) * historyColor; } `; /** * Sets up the state needed for progressive rendering. * @param {!HTMLElement} view The view. * @param {number} lowResFactor The downsampling factor that determines the * initial render resolution. */ function setupProgressiveRendering(view, lowResFactor) { gHighResBlitCamera = new THREE.OrthographicCamera( view.offsetWidth / -2, view.offsetWidth / 2, view.offsetHeight / 2, view.offsetHeight / -2, -10000, 10000); gHighResBlitCamera.position.z = 100; let fullScreenPlane = new THREE.PlaneBufferGeometry(view.offsetWidth, view.offsetHeight); gLowResTexture = new THREE.WebGLRenderTarget( Math.trunc(view.offsetWidth / lowResFactor), Math.trunc(view.offsetHeight / lowResFactor), { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, type: THREE.UnsignedByteType, format: THREE.RGBFormat }); gAccumulationTextures[0] = new THREE.WebGLRenderTarget(view.offsetWidth, view.offsetHeight, { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, type: THREE.FloatType, format: THREE.RGBAFormat }); gAccumulationTextures[1] = new THREE.WebGLRenderTarget(view.offsetWidth, view.offsetHeight, { minFilter: THREE.NearestFilter, magFilter: THREE.NearestFilter, type: THREE.FloatType, format: THREE.RGBAFormat }); let fullScreenAccumulateQuad = new THREE.Mesh( fullScreenPlane, createAccumulateMaterial( gLowResTexture.texture, gAccumulationTextures[1], new THREE.Vector2( Math.trunc(view.offsetWidth / lowResFactor), Math.trunc(view.offsetHeight / lowResFactor)), new THREE.Vector2(view.offsetWidth, view.offsetHeight))); fullScreenAccumulateQuad.position.z = -100; gSceneAccumulate = new THREE.Scene(); gSceneAccumulate.add(fullScreenAccumulateQuad); gSceneAccumulate.autoUpdate = false; let fullScreenNormalizeQuad = new THREE.Mesh( fullScreenPlane, createNormalizeMaterial(gAccumulationTextures[0].texture)); fullScreenNormalizeQuad.position.z = -100; gSceneNormalize = new THREE.Scene(); gSceneNormalize.add(fullScreenNormalizeQuad); gSceneNormalize.autoUpdate = false; gLowResBlitCamera = new THREE.OrthographicCamera( Math.trunc(view.offsetWidth / lowResFactor) / -2, Math.trunc(view.offsetWidth / lowResFactor) / 2, Math.trunc(view.offsetHeight / lowResFactor) / 2, Math.trunc(view.offsetHeight / lowResFactor) / -2, -10000, 10000); gLowResBlitCamera.position.z = 100; gOldProjectionMatrix = gCamera.projectionMatrix.clone(); gOldMatrixWorld = gCamera.matrixWorld.clone(); } /** * Renders a 
MERF frame.
 *
 * @param {?THREE.Matrix4} clip_T_camera (Optional) A (potentially jittered)
 *   projection matrix.
 */
function renderFrame(clip_T_camera) {
  if (!clip_T_camera) {
    clip_T_camera = gCamera.projectionMatrix.clone();
  }
  let camera_T_clip = new THREE.Matrix4();
  camera_T_clip.getInverse(clip_T_camera);

  let world_T_camera = gCamera.matrixWorld;
  let submodel_T_world = gSubmodelTransform;
  let submodel_T_camera = new THREE.Matrix4();
  submodel_T_camera.multiplyMatrices(submodel_T_world, world_T_camera);

  let currentScene = getRayMarchScene();
  currentScene.children[0].material.uniforms['world_T_cam']['value'] =
      submodel_T_camera;
  currentScene.children[0].material.uniforms['cam_T_clip']['value'] =
      camera_T_clip;
  currentScene.children[0].material.uniforms['displayMode']['value'] =
      gDisplayMode - 0;
  gRenderer.clear();
  gRenderer.render(currentScene, gLowResBlitCamera);
}

/**
 * Implements progressive rendering.
 */
function renderProgressively() {
  const downSamplingFactor =
      gAccumulationTextures[0].width / gLowResTexture.width;

  // Early out by rendering the frame at full res.
  if (downSamplingFactor == 1) {
    renderFrame();
    return;
  }

  let cameraMoved = !gCamera.projectionMatrix.equals(gOldProjectionMatrix) ||
      !gCamera.matrixWorld.equals(gOldMatrixWorld);

  //
  // For progressive upsampling, jitter the camera matrix within the pixel
  // footprint.
  //
  // We start by forming a set of jitter offsets that touch every high
  // resolution pixel center.

  // These values assume an even downsampling factor.
  const isEven = (downSamplingFactor % 2) == 0;
  let jitterOffset = 0.5;
  let endIndex = Math.trunc(downSamplingFactor / 2);
  if (!isEven) {
    // But it's not that hard to correct for this assumption: odd factors
    // need an offset of zero to hit the central pixel column/row.
    jitterOffset = 0.0;
    endIndex += 1;
  }
  let samples_x = [];
  let samples_y = [];
  for (let i = 0; i < endIndex; i++) {
    for (let j = 0; j < endIndex; j++) {
      samples_x.push((jitterOffset + i) / downSamplingFactor);
      samples_y.push((jitterOffset + j) / downSamplingFactor);
      samples_x.push(-(jitterOffset + i) / downSamplingFactor);
      samples_y.push((jitterOffset + j) / downSamplingFactor);
      samples_x.push((jitterOffset + i) / downSamplingFactor);
      samples_y.push(-(jitterOffset + j) / downSamplingFactor);
      samples_x.push(-(jitterOffset + i) / downSamplingFactor);
      samples_y.push(-(jitterOffset + j) / downSamplingFactor);
    }
  }

  // To set up the jitter properly we need to update the projection matrices
  // of both our cameras in tandem:
  // 1) the orthographic blit matrix that kicks off the ray march, and
  // 2) the perspective projection matrix which computes ray origins/directions.
  let sample_index = gFrameIndex % samples_x.length;
  let offset_x = samples_x[sample_index];
  let offset_y = samples_y[sample_index];

  // First update the orthographic camera, which uses coordinates in
  // resolution * [-0.5,0.5]^2.
  gLowResBlitCamera.left = offset_x + gLowResTexture.width / -2;
  gLowResBlitCamera.right = offset_x + gLowResTexture.width / 2;
  gLowResBlitCamera.top = offset_y + gLowResTexture.height / 2;
  gLowResBlitCamera.bottom = offset_y + gLowResTexture.height / -2;
  gLowResBlitCamera.updateProjectionMatrix();

  // After this we will be working with clip space cameras, which have
  // coordinates in [-1,1]^2, so we need to scale the offset accordingly.
  offset_x *= 2.0 / gLowResTexture.width;
  offset_y *= 2.0 / gLowResTexture.height;

  // Now adjust the projection matrix that computes the ray parameters.
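  // (Indexing note: THREE.Matrix4.elements is column-major, so elements[8]
  // and elements[9] are the x and y entries of the third column, which
  // multiplies view-space z. After the perspective divide this shear becomes
  // a constant sub-pixel shift of the projected image.)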
  let clip_T_camera = gCamera.projectionMatrix.clone();
  clip_T_camera.elements[8] += offset_x;
  clip_T_camera.elements[9] += offset_y;

  //
  // Now we can do the volume rendering at a lower resolution.
  //
  gRenderer.setRenderTarget(gLowResTexture);
  renderFrame(clip_T_camera);

  //
  // Finally collect these low resolution samples into our high resolution
  // accumulation buffer.
  //

  // With more subsampling we need to average more aggressively over time. This
  // is controlled by emaAlpha (exponential moving average), which averages more
  // when the value gets smaller. This formula for setting emaAlpha was
  // hand-tuned to work well on the bicycle scene.
  let emaAlpha = Math.min(1.0, 0.7 / samples_x.length);
  if (cameraMoved) {
    gFrameIndex = 0;
    emaAlpha = 1.0;
  }

  let accumulationTargetIndex = gFrameIndex % 2;
  let accumulationReadIndex = 1 - accumulationTargetIndex;
  gRenderer.setRenderTarget(gAccumulationTextures[accumulationTargetIndex]);
  gSceneAccumulate.children[0].material.uniforms['mapHistory']['value'] =
      gAccumulationTextures[accumulationReadIndex].texture;
  gSceneAccumulate.children[0].material.uniforms['jitterOffset']['value'] =
      new THREE.Vector2(offset_x, offset_y);
  gSceneAccumulate.children[0].material.uniforms['emaAlpha']['value'] =
      emaAlpha;
  gRenderer.clear();
  gRenderer.render(gSceneAccumulate, gHighResBlitCamera);

  gRenderer.setRenderTarget(null);
  gSceneNormalize.children[0].material.uniforms['map']['value'] =
      gAccumulationTextures[accumulationTargetIndex].texture;
  gRenderer.clear();
  gRenderer.render(gSceneNormalize, gHighResBlitCamera);

  gFrameIndex++;
  gOldProjectionMatrix = gCamera.projectionMatrix.clone();
  gOldMatrixWorld = gCamera.matrixWorld.clone();
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * Compute the linearized index and corresponding weight for trilerp.
 *
 * @param {!THREE.Vector3} position
 * @param {!THREE.Vector3} cornerIndices
 * @param {number} gridSize
 * @return {!Array}
 */
function computeTrilerpLocationsAndWeights(position, cornerIndices, gridSize) {
  // Convert the submodel-space position to 3D grid coordinates with
  // half-voxel centers.
  let gridPosition = new THREE.Vector3().copy(position);
  gridPosition.addScalar(1.0);
  gridPosition.divideScalar(2.0);
  gridPosition.multiplyScalar(gridSize);
  gridPosition.subScalar(0.5);

  // Compute the xyz indices for the vertex specified by cornerIndices.
  const floorPosition = new THREE.Vector3().copy(gridPosition).floor();
  const ceilPosition = new THREE.Vector3().copy(gridPosition).ceil();
  let x = cornerIndices.x > 0 ? ceilPosition.x : floorPosition.x;
  let y = cornerIndices.y > 0 ? ceilPosition.y : floorPosition.y;
  let z = cornerIndices.z > 0 ? ceilPosition.z : floorPosition.z;

  // Clamp to the grid size.
  x = Math.min(Math.max(x, 0), gridSize - 1);
  y = Math.min(Math.max(y, 0), gridSize - 1);
  z = Math.min(Math.max(z, 0), gridSize - 1);

  // Transform the coordinates to match the JAX coordinate system.
  x = gridSize - 1 - x;  // Reverse x.
  [y, z] = [z, y];       // Swap y and z.
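  // E.g. (hypothetical numbers): with gridSize = 4, a corner at (x, y, z) =
  // (1, 2, 3) becomes (4 - 1 - 1, 3, 2) = (2, 3, 2), which linearizes below
  // to (2 * 4 + 3) * 4 + 2 = 46.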
  // And linearize the coordinates to a single 1D index.
  const index = (z * gridSize + y) * gridSize + x;

  // Finally, compute the trilinear interpolation weight for this sample.
  let wx = gridPosition.x - floorPosition.x;
  let wy = gridPosition.y - floorPosition.y;
  let wz = gridPosition.z - floorPosition.z;
  if (cornerIndices.x == 0) {
    wx = 1.0 - wx;
  }
  if (cornerIndices.y == 0) {
    wy = 1.0 - wy;
  }
  if (cornerIndices.z == 0) {
    wz = 1.0 - wz;
  }
  const w = wx * wy * wz;
  return [index, w];
}

/**
 * Trilinearly interpolate MLP kernel weights and return them as a texture.
 *
 * @param {number} submodel
 * @param {number} level
 * @param {!THREE.Vector3} position
 * @return {!THREE.DataTexture}
 */
function trilerpDeferredMlpKernel(submodel, level, position) {
  let newHeight, newWidth, weightsData;

  // Trilinearly interpolate the MLP weights if we have a grid of weights.
  if (!!gDeferredMlp['ResampleDense_' + level + '/kernel']) {
    const kernelDict = gDeferredMlp['ResampleDense_' + level + '/kernel'];
    const weights = kernelDict['data'];
    const gridSize = kernelDict['shape'][1];
    const width = kernelDict['shape'][4];
    const height = kernelDict['shape'][5];
    newHeight = makeMultipleOf(height, 4);
    newWidth = makeMultipleOf(width, 4);
    weightsData = new Float32Array(newWidth * newHeight);

    // Right now we define our grid in world coordinates, but position is in
    // submodel space, so we have to transform it back.
    let worldPosition = new THREE.Vector3().copy(position);
    worldPosition.divideScalar(getSubmodelScaleFactor(submodel));

    const submodelOffset =
        submodel * gridSize * gridSize * gridSize * width * height;
    for (let dx = 0; dx < 2; dx++) {
      for (let dy = 0; dy < 2; dy++) {
        for (let dz = 0; dz < 2; dz++) {
          const [mlpIndex, trilerpWeight] = computeTrilerpLocationsAndWeights(
              worldPosition, new THREE.Vector3(dx, dy, dz), gridSize);
          const weightOffset = submodelOffset + width * height * mlpIndex;
          for (let co = 0; co < newHeight; co++) {
            for (let ci = 0; ci < newWidth; ci++) {
              let index = co * newWidth + ci;
              let weight = 0.0;
              if (ci < width && co < height) {
                weight = weights[weightOffset + ci * height + co];
              }
              if (dx + dy + dz === 0) {
                weightsData[index] = trilerpWeight * weight;
              } else {
                weightsData[index] += trilerpWeight * weight;
              }
            }
          }
        }
      }
    }
  } else {
    // Otherwise just set them directly.
    const kernelDict = gDeferredMlp['Dense_' + level + '/kernel'];
    const weights = kernelDict['data'];
    const width = kernelDict['shape'][0];
    const height = kernelDict['shape'][1];
    newHeight = makeMultipleOf(height, 4);
    newWidth = makeMultipleOf(width, 4);
    weightsData = new Float32Array(newWidth * newHeight);
    for (let co = 0; co < newHeight; co++) {
      for (let ci = 0; ci < newWidth; ci++) {
        let index = co * newWidth + ci;
        if (ci < width && co < height) {
          weightsData[index] = weights[ci * height + co];
        }
      }
    }
  }

  let weightsDataNew = new Float32Array(newWidth * newHeight);
  for (let j = 0; j < newWidth; j += 4) {
    for (let i = 0; i < newHeight; i++) {
      for (let c = 0; c < 4; c++) {
        weightsDataNew[(j / 4) * newHeight * 4 + i * 4 + c] =
            weightsData[(j / 4) * 4 + i * ((newWidth / 4) * 4) + c];
      }
    }
  }

  let texture = new THREE.DataTexture(
      weightsDataNew, 1, newWidth * newHeight / 4, THREE.RGBAFormat);
  texture.magFilter = THREE.NearestFilter;
  texture.minFilter = THREE.NearestFilter;
  texture.type = THREE.FloatType;
  return texture;
}

/**
 * Trilinearly interpolate MLP biases and return them as a list of Vector4s.
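 *
 * The returned list packs the layer's `height` bias scalars four at a time,
 * so its length is makeMultipleOf(height, 4) / 4; this matches the vec4
 * uniform arrays indexed by the shader code generated below.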
 *
 * @param {number} submodel
 * @param {number} level
 * @param {!THREE.Vector3} position
 * @return {!Array}
 */
function trilerpDeferredMlpBiases(submodel, level, position) {
  let biasList;

  // Trilinearly interpolate the MLP biases if we have a grid of biases.
  if (!!gDeferredMlp['ResampleDense_' + level + '/bias']) {
    const biasDict = gDeferredMlp['ResampleDense_' + level + '/bias'];
    const biases = biasDict['data'];
    const gridSize = biasDict['shape'][1];
    const height = biasDict['shape'][4];
    const newHeight = makeMultipleOf(height, 4);
    biasList = new Array(newHeight / 4);

    // Right now we define our grid in world coordinates, but position is in
    // submodel space, so we have to transform it back.
    let worldPosition = new THREE.Vector3().copy(position);
    worldPosition.divideScalar(getSubmodelScaleFactor(submodel));

    const submodelOffset = submodel * gridSize * gridSize * gridSize * height;
    for (let dx = 0; dx < 2; dx++) {
      for (let dy = 0; dy < 2; dy++) {
        for (let dz = 0; dz < 2; dz++) {
          const [mlpIndex, trilerpWeight] = computeTrilerpLocationsAndWeights(
              worldPosition, new THREE.Vector3(dx, dy, dz), gridSize);
          const biasOffset = submodelOffset + height * mlpIndex;
          for (let biasIndex = 0; biasIndex < newHeight / 4; ++biasIndex) {
            let vector = new THREE.Vector4(0.0, 0.0, 0.0, 0.0);
            for (let ci = 0; ci < 4; ci++) {
              if (biasIndex * 4 + ci < newHeight) {
                vector.setComponent(
                    ci, biases[biasOffset + biasIndex * 4 + ci]);
              }
            }
            vector.multiplyScalar(trilerpWeight);
            if (dx + dy + dz === 0) {
              biasList[biasIndex] = vector;
            } else {
              biasList[biasIndex].add(vector);
            }
          }
        }
      }
    }
  } else {
    // Otherwise just set them directly.
    const biasDict = gDeferredMlp['Dense_' + level + '/bias'];
    const biases = biasDict['data'];
    const height = biasDict['shape'][0];
    const newHeight = makeMultipleOf(height, 4);
    biasList = new Array(newHeight / 4);
    for (let biasIndex = 0; biasIndex < newHeight / 4; ++biasIndex) {
      let vector = new THREE.Vector4(0.0, 0.0, 0.0, 0.0);
      for (let ci = 0; ci < 4; ci++) {
        if (biasIndex * 4 + ci < newHeight) {
          vector.setComponent(ci, biases[biasIndex * 4 + ci]);
        }
      }
      biasList[biasIndex] = vector;
    }
  }
  return biasList;
}

/**
 * Rewrites a shader with view dependence definitions.
 *
 * @param {!Object} scene_params
 * @param {string} shader
 * @return {string}
 */
function rewriteViewDependenceDefinitions(scene_params, shader) {
  let network_weights = getDeferredMlp();
  const mlpName =
      !!network_weights['ResampleDense_0/kernel'] ? 'ResampleDense_' : 'Dense_';
  const si = !!network_weights['ResampleDense_0/kernel'] ? 4 : 0;

  // Write bias values as uniform references.
  let fragmentShaderSource = shader;

  // Initialize output activations for each layer. The following code generates
  // lines like,
  // intermediate_one[0] = bias_0[0]; intermediate_one[1] = bias_0[1]; ...
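  // (One assignment is generated per vec4 lane, i.e. width / 4 assignments
  // per layer, where width is the length of that layer's bias vector.)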
let layer_output_variable_names = ['intermediate_one', 'intermediate_two', 'result']; for (let layerIndex = 0; layerIndex < 3; layerIndex++) { let width = network_weights[mlpName + layerIndex + '/bias'].shape[si]; let inputVar = `bias_${layerIndex}`; let outputVar = layer_output_variable_names[layerIndex]; let lines = []; for (let i = 0; i < width / 4; i++) { lines.push(`${outputVar}[${i}] = ${inputVar}[${i}];`); } let biasLines = lines.join(' ') + '\n'; fragmentShaderSource = fragmentShaderSource.replace( new RegExp(`INITIALIZE_OUTPUT_ACTIVATIONS_${layerIndex}`, 'g'), biasLines); } let channelsZero = makeMultipleOf(network_weights[mlpName + '0/kernel'].shape[si], 4); let channelsOne = makeMultipleOf(network_weights[mlpName + '0/bias'].shape[si], 4); let channelsTwo = makeMultipleOf(network_weights[mlpName + '1/bias'].shape[si], 4); let channelsThree = makeMultipleOf(network_weights[mlpName + '2/bias'].shape[si], 4); let posEncScales = 4; fragmentShaderSource = fragmentShaderSource.replace( new RegExp('NUM_CHANNELS_ZERO', 'g'), channelsZero); fragmentShaderSource = fragmentShaderSource.replace( new RegExp('NUM_POSENC_SCALES', 'g'), posEncScales.toString()); fragmentShaderSource = fragmentShaderSource.replace( new RegExp('NUM_CHANNELS_ONE', 'g'), channelsOne); fragmentShaderSource = fragmentShaderSource.replace( new RegExp('NUM_CHANNELS_TWO', 'g'), channelsTwo); fragmentShaderSource = fragmentShaderSource.replace( new RegExp('NUM_CHANNELS_THREE', 'g'), channelsThree); return fragmentShaderSource; } /** * @param {number} x * @param {number} y * @return {number} */ function makeMultipleOf(x, y) { if (x % y == 0) { return x; } else { return x + y - x % y; } } // Copyright 2024 The Google Research Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Defines default camera pose for each scene. */ /** * Set initial camera pose depending on the scene. * @param {string} dirUrl The url where scene files are stored. * @param {!THREE.Vector3} submodelCenter The world-space center of the * current submodel. 
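 *
 * If dirUrl contains one of the known scene names below, the matching pose
 * is applied; otherwise the camera keeps the 'default' pose.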
 */
function setupInitialCameraPose(dirUrl, submodelCenter) {
  const initialPoses = {
    'default': {
      'position': [0.0, 0.0, 0.0],
      'lookat': [0.0, 0.0, 1.0],
    },
    'gardenvase': {
      'position':
          [-1.1868985500525444, 0.1898527233835131, -0.04923970470097733],
      'lookat':
          [-0.05581392405861873, -0.40202760746449473, 0.02985343723310108],
    },
    'stump': {
      'position': [0.0, 0.4, -0.8],
      'lookat': [0.0, -0.3, 0.0],
    },
    'flowerbed': {
      'position':
          [-0.02402388218043944, 0.11825367482140309, 0.907525093384825],
      'lookat':
          [0.016306507293821822, -0.15676691106539536, -0.016192691610482132],
    },
    'treehill': {
      'position':
          [-0.70994804046872, 0.19435986647308223, 0.30833533637897453],
      'lookat':
          [0.06327294888291587, -0.13299740290200024, 0.0037554887097183934],
    },
    'bicycle': {
      'position':
          [-0.4636408064933045, 0.49624791762954734, 0.8457540259646037],
      'lookat':
          [0.017170160491904368, -0.24649043500978007, -0.07787524806850904],
    },
    'kitchenlego': {
      'position':
          [-0.5872864419408019, 0.05633623000443683, -0.9472239198227385],
      'lookat':
          [0.07177184299031553, -0.4020277194862108, 0.04850453170234236],
    },
    'fulllivingroom': {
      'position':
          [1.1539572663654272, -0.006785278327404387, -0.0972986385811351],
      'lookat':
          [-0.05581392405861873, -0.40202760746449473, 0.02985343723310108],
    },
    'kitchencounter': {
      'position':
          [-0.7006764413546107, 0.2255633917824672, -0.46941182833135847],
      'lookat':
          [0.13197415755218864, -0.4020278046227117, 0.09221809216932579],
    },
    'officebonsai': {
      'position':
          [-0.4773314920559294, 0.05409730603092788, 1.014304107335418],
      'lookat':
          [0.11970974858222336, -0.40426664345968033, -0.019801655674420764],
    },
  };

  /**
   * Quick helper function to set the lookat point regardless of camera
   * controls.
   * @param {number} x
   * @param {number} y
   * @param {number} z
   */
  function cameraLookAt(x, y, z) {
    if (gOrbitControls) {
      gOrbitControls.target.x = x;
      gOrbitControls.target.y = y;
      gOrbitControls.target.z = z;
    } else if (gMapControls) {
      gMapControls.target.x =
          gCamera.position.x + (x - gCamera.position.x) * gCamera.near;
      gMapControls.target.y =
          gCamera.position.y + (y - gCamera.position.y) * gCamera.near;
      gMapControls.target.z =
          gCamera.position.z + (z - gCamera.position.z) * gCamera.near;
    } else {
      gCamera.lookAt(x, y, z);
    }
  }

  function setCameraPose(d) {
    gCamera.position.x = d['position'][0] + submodelCenter.x;
    gCamera.position.y = d['position'][1] + submodelCenter.y;
    gCamera.position.z = d['position'][2] + submodelCenter.z;
    cameraLookAt(
        d['lookat'][0] + submodelCenter.x,
        d['lookat'][1] + submodelCenter.y,
        d['lookat'][2] + submodelCenter.z);
  }

  setCameraPose(initialPoses['default']);
  for (let sceneName in initialPoses) {
    if (dirUrl.includes(sceneName)) {
      setCameraPose(initialPoses[sceneName]);
      break;
    }
  }
  gCamera.updateProjectionMatrix();
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Input event handling.
 */

/**
 * We control the camera using either orbit controls...
 * @type {?THREE.OrbitControls}
 */
let gOrbitControls = null;

/**
 * Map-controls, which are orbit controls with custom arguments, ...
 * @type {?THREE.OrbitControls}
 */
let gMapControls = null;

/**
 * ...or for large scenes we use FPS-style controls.
 * @type {?THREE.PointerLockControls}
 */
let gPointerLockControls = null;

// With PointerLockControls we have to track key states ourselves.
/** @type {boolean} */
let gKeyW = false;
/** @type {boolean} */
let gKeyA = false;
/** @type {boolean} */
let gKeyS = false;
/** @type {boolean} */
let gKeyD = false;
/** @type {boolean} */
let gKeyQ = false;
/** @type {boolean} */
let gKeyE = false;
/** @type {boolean} */
let gKeyShift = false;

/**
 * Keeps track of frame times for smooth camera motion.
 * @type {!THREE.Clock}
 */
const gClock = new THREE.Clock();

/**
 * Adds event listeners to UI.
 */
function addHandlers() {
  let shaderEditor = document.getElementById('shader-editor');

  document.addEventListener('keypress', function(e) {
    if (document.activeElement.tagName.toLowerCase() === 'input' ||
        document.activeElement.tagName.toLowerCase() === 'textarea') {
      return;  // Don't interfere with text input fields.
    }
    if (document.activeElement === shaderEditor) {
      return;
    }
    if (e.keyCode === 32 || e.key === ' ' || e.key === 'Spacebar') {
      if (gDisplayMode == DisplayModeType.DISPLAY_NORMAL) {
        gDisplayMode = DisplayModeType.DISPLAY_DIFFUSE;
        console.log('Displaying DIFFUSE');
      } else if (gDisplayMode == DisplayModeType.DISPLAY_DIFFUSE) {
        gDisplayMode = DisplayModeType.DISPLAY_FEATURES;
        console.log('Displaying DISPLAY_FEATURES');
      } else if (gDisplayMode == DisplayModeType.DISPLAY_FEATURES) {
        gDisplayMode = DisplayModeType.DISPLAY_VIEW_DEPENDENT;
        console.log('Displaying DISPLAY_VIEW_DEPENDENT');
      } else if (gDisplayMode == DisplayModeType.DISPLAY_VIEW_DEPENDENT) {
        gDisplayMode = DisplayModeType.DISPLAY_COARSE_GRID;
        console.log('Displaying DISPLAY_COARSE_GRID');
      } else /* gDisplayMode == DisplayModeType.DISPLAY_COARSE_GRID */ {
        gDisplayMode = DisplayModeType.DISPLAY_NORMAL;
        console.log('Displaying DISPLAY_NORMAL');
      }
      e.preventDefault();
    }
    if (e.key === 'r') {
      console.log('Recompile shader.');
      let material = getRayMarchScene().children[0].material;
      material.fragmentShader = shaderEditor.value;
      material.needsUpdate = true;
      e.preventDefault();
    }
    if (e.key === '?') {
      let position = gCamera.getWorldPosition(new THREE.Vector3(0., 0., 0.));
      let direction = gCamera.getWorldQuaternion(new THREE.Quaternion());
      console.log(`
// Camera Info:
gCamera.position.set(${position.x}, ${position.y}, ${position.z});
gCamera.quaternion.set(${direction.x}, ${direction.y}, ${direction.z}, ${
          direction.w});
`);
      e.preventDefault();
    }
  });

  document.addEventListener('keydown', function(e) {
    if (document.activeElement.tagName.toLowerCase() === 'input' ||
        document.activeElement.tagName.toLowerCase() === 'textarea') {
      return;  // Don't interfere with text input fields.
    }
    if (document.activeElement === shaderEditor) {
      return;
    }
    let key = e.key.toLowerCase();
    if (key === 'w') {
      gKeyW = true;
      e.preventDefault();
    }
    if (key === 'a') {
      gKeyA = true;
    }
    if (key === 's') {
      gKeyS = true;
      e.preventDefault();
    }
    if (key === 'd') {
      gKeyD = true;
      e.preventDefault();
    }
    if (key === 'q') {
      gKeyQ = true;
      e.preventDefault();
    }
    if (key === 'e') {
      gKeyE = true;
      e.preventDefault();
    }
    if (e.key === 'Shift') {
      gKeyShift = true;
      e.preventDefault();
    }
  });

  document.addEventListener('keyup', function(e) {
    if (document.activeElement.tagName.toLowerCase() === 'input' ||
        document.activeElement.tagName.toLowerCase() === 'textarea')
    {
      return;  // Don't interfere with text input fields.
    }
    if (document.activeElement === shaderEditor) {
      return;
    }
    let key = e.key.toLowerCase();
    if (key === 'w') {
      gKeyW = false;
      e.preventDefault();
    }
    if (key === 'a') {
      gKeyA = false;
    }
    if (key === 's') {
      gKeyS = false;
      e.preventDefault();
    }
    if (key === 'd') {
      gKeyD = false;
      e.preventDefault();
    }
    if (key === 'q') {
      gKeyQ = false;
      e.preventDefault();
    }
    if (key === 'e') {
      gKeyE = false;
      e.preventDefault();
    }
    if (e.key === 'Shift') {
      gKeyShift = false;
      e.preventDefault();
    }
  });
}

/**
 * Sets up the camera controls.
 * @param {string} mouseMode Either "orbit", "fps" or "map".
 * @param {!HTMLElement} view The view.
 */
function setupCameraControls(mouseMode, view) {
  if (mouseMode && mouseMode == 'fps') {
    gPointerLockControls = new THREE.PointerLockControls(gCamera, view);

    let startButton = document.createElement('button');
    startButton.innerHTML = 'Start the tour!';
    startButton.classList.add('mouse-navigation-button');  // Style the button.
    startButton.addEventListener('click', function() {
      gPointerLockControls.lock();
      gPointerLockControls.connect();
      startButton.classList.add('hidden');  // Hide the button once locked.
    }, false);

    // Listen for pointer-lock exit and show the button again when the mouse
    // is released (via "Escape").
    gPointerLockControls.addEventListener('unlock', function() {
      startButton.classList.remove('hidden');
    });

    view.appendChild(startButton);
  } else if (mouseMode && mouseMode == 'map') {
    gMapControls = new THREE.OrbitControls(gCamera, view);
    gMapControls.panSpeed = 0.5 / gCamera.near;
    gMapControls.enableZoom = false;
    gMapControls.screenSpacePanning = false;
    gMapControls.mouseButtons = {
      LEFT: THREE.MOUSE.ROTATE,
      RIGHT: THREE.MOUSE.PAN
    };
    gMapControls.touches = {
      ONE: THREE.TOUCH.PAN,
      TWO: THREE.TOUCH.DOLLY_ROTATE,
    };
  } else {  // mouseMode == 'orbit'
    gOrbitControls = new THREE.OrbitControls(gCamera, view);
    gOrbitControls.screenSpacePanning = true;
    gOrbitControls.zoomSpeed = 0.5;
    // Disable damping until we have temporal reprojection for upscaling.
    // gOrbitControls.enableDamping = true;
  }
}

/**
 * Updates the camera based on user input.
 */
function updateCameraControls() {
  if (gOrbitControls) {
    gOrbitControls.update();
  } else if (gMapControls) {
    gMapControls.update();
  } else if (gPointerLockControls) {
    const elapsed = gClock.getDelta();
    let movementSpeed = 0.25;
    if (gKeyShift) {
      movementSpeed = 1;
    }
    let camForward = gCamera.getWorldDirection(new THREE.Vector3(0., 0., 0.));
    let upVec = new THREE.Vector3(0., 1., 0.);
    if (gKeyW) {
      // gPointerLockControls.moveForward undesirably restricts movement to the
      // X-Z-plane.
      gCamera.position.addScaledVector(camForward, elapsed * movementSpeed);
    }
    if (gKeyA) {
      gPointerLockControls.moveRight(-elapsed * movementSpeed);
    }
    if (gKeyS) {
      gCamera.position.addScaledVector(camForward, -elapsed * movementSpeed);
    }
    if (gKeyD) {
      gPointerLockControls.moveRight(elapsed * movementSpeed);
    }
    if (gKeyQ) {
      gCamera.position.addScaledVector(upVec, -elapsed * movementSpeed);
    }
    if (gKeyE) {
      gCamera.position.addScaledVector(upVec, elapsed * movementSpeed);
    }
  }
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Render time benchmarking logic.
 */

/**
 * Whether the benchmark mode is currently in a cool-down state.
 */
let gIsCoolingDown = false;

/**
 * A list of frame timestamps, used for benchmarking.
 */
let gBenchmarkTimestamps = null;
let gFrameTimes = [];

/**
 * A dictionary of camera poses for benchmarking.
 * @type {!Object}
 */
let gBenchmarkCameras = {};

/**
 * Index of the current test camera that's being rendered for benchmarking.
 * @type {number}
 */
let gBenchmarkCameraIndex = 0;

/**
 * We use this constant as a prefix when saving benchmark output files.
 * @type {string}
 */
const gBenchmarkMethodName = 'blockmerf';

/**
 * The scene name, also used as a prefix when saving benchmark output files.
 * @type {?string}
 */
let gBenchmarkSceneName = null;

/**
 * Whether output images should be saved or not.
 * @type {boolean}
 */
let gSaveBenchmarkFrames = false;

/**
 * Shows the benchmark stats window and sets up the event listener for it.
 * @param {string} sceneName The name of the current scene.
 * @param {boolean} saveImages Should the benchmark images be saved to disk?
 */
function setupBenchmarkStats(sceneName, saveImages) {
  gBenchmarkSceneName = sceneName;
  gSaveBenchmarkFrames = saveImages;
  let benchmarkStats = document.getElementById('benchmark-stats');
  benchmarkStats.style.display = 'block';
  benchmarkStats.addEventListener('click', e => {
    gBenchmark = true;
  });
}

/**
 * Clears the benchmark stats content.
 */
function clearBenchmarkStats() {
  let benchmarkStats = document.getElementById('benchmark-stats');
  benchmarkStats.innerHTML = '';
}

/**
 * Adds a row of text to the benchmark stats window.
 * @param {string} str The row of text to append.
 */
function addBenchmarkRow(str) {
  let benchmarkStats = document.getElementById('benchmark-stats');
  benchmarkStats.innerHTML += str + '\n';
}

/**
 * Returns the benchmark stats output string.
 * @return {string} The contents of the benchmark stats window.
 */
function getBenchmarkStats() {
  const benchmarkStats = document.getElementById('benchmark-stats');
  return benchmarkStats.innerHTML;
}

/**
 * Loads the pose and projection matrices for the images used for benchmarking.
 * @param {!FilenameToLinkTranslator} filenameToLinkTranslator
 */
function loadBenchmarkCameras(filenameToLinkTranslator) {
  const benchmarkCamerasUrl =
      filenameToLinkTranslator.translate('test_frames.json');
  const benchmarkCamerasPromise = loadJSONFile(benchmarkCamerasUrl);
  benchmarkCamerasPromise.catch(error => {
    console.error(
        'Could not load test frames from: ' + benchmarkCamerasUrl +
        ', error: ' + error);
    return;
  });
  benchmarkCamerasPromise.then(parsed => {
    gBenchmarkCameras = parsed['test_frames'];
  });
}

/**
 * Sets the pose & projection matrix of the camera to re-render a benchmark
 * image.
 * @param {!THREE.PerspectiveCamera} camera The camera whose pose and projection
 *   matrix we're changing.
 * @param {number} index The index of the benchmark image we want to re-render.
 */
function setBenchmarkCameraPose(camera, index) {
  camera.position.fromArray(gBenchmarkCameras[index]['position']);
  camera.setRotationFromMatrix(
      new THREE.Matrix4().fromArray(gBenchmarkCameras[index]['rotation']));
  camera.projectionMatrix.fromArray(gBenchmarkCameras[index]['projection']);
}

/**
 * Cools the GPU down between benchmarking runs.
 *
 * This function does the minimal work possible (i.e. clearing the screen to
 * a new color), to keep both the GPU driver and Javascript animation scheduler
 * active while also letting the GPU cores cool down.
 * @param {number} t The timestamp (ms) passed by requestAnimationFrame.
 */
function cooldownFrame(t) {
  const alpha = 0.5 * (1.0 + Math.sin(t * Math.PI / 1000.0));
  let clearColor = new THREE.Color('#FFFFFF');
  clearColor.lerp(new THREE.Color('#A5C0E2'), alpha);
  gRenderer.setClearColor(clearColor, 1.0);
  gRenderer.clear();
  if (gStats) {
    gStats.update();
  }
  if (gIsCoolingDown) {
    requestAnimationFrame(cooldownFrame);
  }
}

/**
 * Returns the current timestamp formatted as a string.
 *
 * Example: "2023_11_22_1042"
 *
 * @return {string}
 */
function formatTimestampAsString() {
  const date = new Date();
  const hours = date.getHours().toString().padStart(2, '0');
  const minutes = date.getMinutes().toString().padStart(2, '0');
  return `${date.getFullYear()}_${date.getMonth() + 1}_${date.getDate()}` +
      `_${hours}${minutes}`;
}

/**
 * Benchmarks performance by rendering test images while measuring frame times.
 *
 * You can use this function by calling it after all WebGL calls have been
 * completed for a frame, just before the next call to requestAnimationFrame().
 *
 * Note however that this function has been designed to keep the GPU cool and
 * may therefore delay the call to requestAnimationFrame(). This is why
 * defaultScheduleFrame is a parameter and why the return value is a similar
 * function (that may have an additional delay inserted).
 *
 * @param {!Function} defaultScheduleFrame The function the renderer normally
 *   uses to schedule the next frame for rendering.
 * @return {!Function}
 */
function benchmarkPerformance(defaultScheduleFrame) {
  // These constants were tuned to get repeatable results in the bicycle scene
  // on an iPhone 15 Pro and a 2019 16" MacBook Pro with an AMD Radeon 5500M.
  const kCoolDownSeconds = 0.0;
  const kMaxFramesPerCamera = Math.max(4, Math.ceil(100 / gFrameMult));
  const kNumFramesToDiscard =
      Math.max(2, Math.ceil(0.1 * kMaxFramesPerCamera));

  // We start benchmarking only after gLastFrame has first been set.
  if (isLoading()) {
    return defaultScheduleFrame;
  }

  // We use the first frame after loading the scene to set up the
  // benchmarking state and cool the GPU down.
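  // (Per camera pose the flow is: gather kMaxFramesPerCamera timestamps,
  // drop the first kNumFramesToDiscard as warm-up, then estimate the mean
  // frame time from the span of the remaining timestamps.)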
  if (!gBenchmarkTimestamps && !gIsCoolingDown) {
    setBenchmarkCameraPose(gCamera, 0);
    gBenchmarkTimestamps = [];
    if (kCoolDownSeconds > 0.0) {
      clearBenchmarkStats();
      addBenchmarkRow(`Cooling the GPU down for ${
          kCoolDownSeconds} seconds before benchmarking...`);
      gIsCoolingDown = true;
      requestAnimationFrame(cooldownFrame);
      return () => {
        setTimeout(() => {
          let s = new THREE.Vector2();
          gRenderer.getSize(s);
          clearBenchmarkStats();
          addBenchmarkRow(`frame timestamps (ms) at ${s.x}x${s.y}`);
          addBenchmarkRow('cam_idx ; start ; end ; mean frame time');
          gIsCoolingDown = false;
          defaultScheduleFrame();
        }, 1000 * kCoolDownSeconds);
      };
    }
    let s = new THREE.Vector2();
    gRenderer.getSize(s);
    clearBenchmarkStats();
    addBenchmarkRow(`frame timestamps (ms) at ${s.x}x${s.y}`);
    addBenchmarkRow('cam_idx ; start ; end ; mean frame time');
    return defaultScheduleFrame;
  }

  gBenchmarkTimestamps.push(window.performance.now());

  // Let the default frame scheduler proceed if we're still gathering frames.
  if (gBenchmarkTimestamps.length < kMaxFramesPerCamera) {
    return defaultScheduleFrame;
  }

  if (gSaveBenchmarkFrames) {
    const frameAsPng = gRenderer.domElement.toDataURL('image/png');
    saveAs(frameAsPng, digits(gBenchmarkCameraIndex, 4) + '.png');
  }

  // Now that we have enough frames we can compute frame-time statistics.
  let benchmarkTimestamps = gBenchmarkTimestamps.slice(kNumFramesToDiscard);
  const numBenchmarkFrames = benchmarkTimestamps.length;
  const firstFrameTimestamp = benchmarkTimestamps[0];
  const lastFrameTimestamp = benchmarkTimestamps.pop();
  let meanTime = (lastFrameTimestamp - firstFrameTimestamp) /
      (gFrameMult * (numBenchmarkFrames - 1));
  gFrameTimes.push(meanTime);

  // Report them in the benchmark console.
  addBenchmarkRow(`${gBenchmarkCameraIndex} ; ${firstFrameTimestamp} ; ${
      lastFrameTimestamp} ; ${meanTime}`);

  // No more cameras: stop benchmarking, and store the results as a CSV file.
  if (++gBenchmarkCameraIndex >= gBenchmarkCameras.length) {
    console.log(gFrameTimes.reduce((a, b) => a + b, 0) / gFrameTimes.length);
    gBenchmark = false;
    const csvBlob =
        new Blob([getBenchmarkStats()], {type: 'text/plain;charset=utf-8'});
    const csvName = gBenchmarkMethodName + '_' + gBenchmarkSceneName + '_' +
        'frameMult_' + gFrameMult + '_' + formatTimestampAsString() + '.csv';
    saveAs(csvBlob, csvName);
    return defaultScheduleFrame;
  }

  // Otherwise, set things up for benchmarking the next camera pose and sleep
  // for the cooldown time to avoid biased results from thermal throttling.
  gBenchmarkTimestamps = [];
  setBenchmarkCameraPose(gCamera, gBenchmarkCameraIndex);
  if (kCoolDownSeconds > 0.0) {
    gIsCoolingDown = true;
    requestAnimationFrame(cooldownFrame);
    return () => {
      setTimeout(() => {
        gIsCoolingDown = false;
        defaultScheduleFrame();
      }, 1000 * kCoolDownSeconds);
    };
  }
  return defaultScheduleFrame;
}

// Copyright 2024 The Google Research Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * @fileoverview Main driver for web viewer.
 */

/**
 * Panel for the current submodel.
 */
let gSubmodelPanel = null;
let gVMemPanel = null;

/**
 * Number of sample points per voxel.
 * @type {number}
 */
let gStepMult = 1;

/**
 * For large scenes with varying exposure we set this value to be the exposure
 * of the virtual camera (shutter_speed_in_seconds * iso / 1000).
 * @type {?number}
 */
let gExposure = null;

/**
 * Loads the full scene representation.
 *
 * This includes all submodel assets, including allocation and download.
 *
 * This function should be called exactly once.
 *
 * @param {string} dirUrl Either points to a directory that contains scene
 *   files or to a json file that maps virtual filenames to download links.
 * @param {!Object} overrideParams A dictionary that contains overrides for the
 *   params in scene_params.json (e.g. combineMode, deferredMode or useBits).
 */
function loadScene(dirUrl, overrideParams) {
  // Check if dirUrl points to a json file or to a directory.
  let filenameToLinkPromise;
  if (dirUrl && dirUrl.includes('.json')) {
    // If this is the case, we fetch a JSON file that maps filenames to links.
    filenameToLinkPromise = loadJSONFile(dirUrl);
  } else {
    // Otherwise, the scene files directly lie at dirUrl and we create a
    // dummy promise that resolves immediately.
    filenameToLinkPromise = Promise.resolve(null);
    console.error(
        'dirUrl is null or invalid; assuming the JSON file lives in the ' +
        'directory.');
  }

  filenameToLinkPromise
      .then(filenameToLink => {
        // Mapping from fake filenames to real filenames under root directory
        // dirUrl.
        const router = new Router(dirUrl, filenameToLink);
        console.log('router:', router);

        // Loads scene parameters (voxel grid size, view-dependence MLP).
        const sceneParamsUrl = router.translate('scene_params.json');
        console.log('sceneParamsUrl:', sceneParamsUrl);
        const sceneParamsPromise = loadJSONFile(sceneParamsUrl);
        console.log('sceneParamsPromise:', sceneParamsPromise);

        if (overrideParams['loadBenchmarkCameras']) {
          loadBenchmarkCameras(router);
        }

        // Some of the shader code is stored in separate files.
        return Promise.all([sceneParamsPromise, {router, filenameToLink}]);
      })
      .then(parsed => {
        // scene_params.json for this scene.
        // Translates filenames to full URLs.
        const [sceneParams, carry] = parsed;

        // Determine if there are multiple submodels or not. If so, figure out
        // how many.
        let initialSubmodelIndex = 0;
        gUseSubmodel = (sceneParams.hasOwnProperty('num_local_submodels') &&
            sceneParams['num_local_submodels'] > 1);
        if (gUseSubmodel) {
          // Override the default submodel with the one the user chose by URL.
          gSubmodelCount = sceneParams['num_local_submodels'];
          initialSubmodelIndex =
              sceneParams['sm_to_params'][sceneParams['submodel_idx']];
        }

        // Get links to scene_params.json files for each submodel.
        let sceneParamsPromises = [];
        for (let si = 0; si < gSubmodelCount; ++si) {
          // Get the submodel ids participating in this scene.
          const submodelId = sceneParams['params_to_sm'][si];
          // Find the path to its scene_params.json file.
          const filePath = carry.router.translate(
              submodelAssetPath(submodelId, 'scene_params.json'));
          // Construct path to scene_params.json for this submodel.
          sceneParamsPromises.push(loadJSONFile(filePath));
        }

        // Wait for all scene_params.json files to be loaded.
        return Promise.all(
            [{...carry, initialSubmodelIndex}, ...sceneParamsPromises]);
      })
      .then(loaded => {
        let [carry, ...submodelSceneParams] = loaded;
        for (let si = 0; si < submodelSceneParams.length; ++si) {
          // Override the scene params using the URL GET variables.
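          // (extend() is a helper defined elsewhere in the viewer; judging by
          // its use here it merges overrideParams on top of the per-submodel
          // params, so URL settings take precedence.)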
          submodelSceneParams[si] =
              extend(submodelSceneParams[si], overrideParams);

          // Build a fake-filename-to-real-filename translator for this
          // submodel.
          const submodelId = submodelSceneParams[si]['params_to_sm'][si];
          let subDirUrl = dirUrl;
          if (gUseSubmodel) {
            subDirUrl = `${subDirUrl}/${submodelAssetPath(submodelId)}`;
          }
          let submodelRouter = new Router(subDirUrl, carry.filenameToLink);

          // Load all assets related to this submodel. This is not a blocking
          // operation.
          // TODO: Consider loading this content on-demand and using an LRU
          // cache to bound memory usage.
          let submodelContent =
              initializeSceneContent(submodelSceneParams[si], submodelRouter);
          console.log(`spec for submodel #${si}:`, submodelContent.spec);

          // Register submodel content with the texture manager.
          registerSubmodelContent(si, submodelContent);
        }

        // Now that we know the submodel scale we can set the camera pose.
        let si = carry.initialSubmodelIndex;
        setupInitialCameraPose(
            dirUrl,
            submodelCenter(si, getSubmodelContent(si).params),
        );

        // Instantiate scene & texture buffers.
        return Promise.all([si, initializeDeferredMlp(si)]);
      })
      .then(([si, _]) => {
        return initializePingPongBuffers(si);
      })
      .then(() => {
        return requestAnimationFrame(renderNextFrame);
      });
}

// Variable to enable/disable camera movement.
let disableCameraControls = false;

/**
 * Initializes the application based on the URL parameters.
 */
function initFromParameters() {
  // HTTP GET query parameters.
  const params = new URL(window.location.href).searchParams;
  console.log(params);

  // Base directory for all assets.
  const dirUrl = 'nyc/sm_004';
  console.log(dirUrl);

  // Controls platform-specific defaults: phone, low, medium, high. Not
  // const as benchmark=true can override it.
  let quality = params.get('quality');

  // Initialize lowResFactor with a default value.
  let lowResFactor = parseInt(params.get('downscale') || 1, 10);

  // Number of samples per voxel. Increase for slower rendering and fewer
  // artifacts.
  const stepMult = params.get('stepMult');
  if (stepMult) {
    gStepMult = parseInt(stepMult, 10);
  }
  const frameMult = params.get('frameMult');
  if (frameMult) {
    gFrameMult = parseInt(frameMult, 10);
  }

  // Manually specify exposure for exposure-aware models.
  const exposure = params.get('exposure');
  if (exposure) {
    gExposure = parseFloat(exposure);
  }

  // For manually overriding parameters in scene_params.json.
  let overrideParams = {};

  const benchmarkParam = params.get('benchmark');
  const benchmark = benchmarkParam &&
      (benchmarkParam.toLowerCase() === 'time' ||
       benchmarkParam.toLowerCase() === 'quality');
  if (benchmark) {
    overrideParams['loadBenchmarkCameras'] = true;
    quality = 'high';
    const sceneNameChunks = dirUrl.split('/').slice(-2);
    setupBenchmarkStats(
        sceneNameChunks[0] + '_' + sceneNameChunks[1],
        benchmarkParam.toLowerCase() === 'quality');
  }

  // Valid values: snerg, vfr.
  const deferredMode = params.get('deferredMode');
  if (deferredMode) {
    overrideParams['deferred_rendering_mode'] = deferredMode;
  }

  // Valid values: sum, concat_and_sum.
  const combineMode = params.get('combineMode');
  if (combineMode && combineMode === 'concat_and_sum') {
    overrideParams['merge_features_combine_op'] = 'coarse_sum';
  }

  // Are the occupancy grids bitpacked?
  const useBits = params.get('useBits');
  if (useBits) {
    overrideParams['useBits'] = useBits.toLowerCase() === 'true';
  }

  // Use a distance grid for calculating step sizes.
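  // (For example, append "?useDistanceGrid=true" to the viewer URL; the
  // boolean flags below are parsed the same way.)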
  const useDistanceGrid = params.get('useDistanceGrid');
  if (useDistanceGrid) {
    overrideParams['useDistanceGrid'] =
        useDistanceGrid.toLowerCase() === 'true';
  }

  // Load legacy scenes, where the distance & occupancy grids are stored
  // as a single monolithic file.
  const legacyGrids = params.get('legacyGrids');
  if (legacyGrids) {
    overrideParams['legacyGrids'] = legacyGrids.toLowerCase() === 'true';
  }

  // Sets the activation function of the DeferredMLP. Either "relu" or "elu".
  // Defaults to elu.
  const activation = params.get('activation');
  if (activation) {
    overrideParams['activation'] = activation;
  }

  // Whether to use feature gating for the triplanes. Either "true" or "false".
  // Defaults to true.
  const featureGating = params.get('featureGating');
  if (featureGating) {
    overrideParams['feature_gating'] = featureGating.toLowerCase() === 'true';
  }

  // Limit the number of cached submodel payloads.
  const submodelCacheSize = params.get('submodelCacheSize');
  if (submodelCacheSize) {
    gSubmodelCacheSize = Number(submodelCacheSize);
  }

  // Merge slices of assets together before binding to a WebGL texture.
  const mergeSlices = params.get('mergeSlices');
  if (mergeSlices) {
    overrideParams['merge_slices'] = mergeSlices == 'true';
  }

  // The background color (in hex, e.g. #FF0000 for red) that the scene is
  // rendered on top of. Defaults to medium grey.
  const backgroundColor = params.get('backgroundColor');
  if (backgroundColor) {
    overrideParams['backgroundColor'] = '#' + backgroundColor;
  }

  // Create the view container and add its class so it picks up the styles
  // from style.css.
  const view = document.createElement('div');
  view.classList.add('view');

  // Add the view container to the #viewspacecontainer element before
  // reading its dimensions.
  const viewSpaceContainer = document.getElementById('viewspacecontainer');
  viewSpaceContainer.appendChild(view);

  // Now that the element is in the DOM, we can read the dimensions defined
  // by the CSS.
  function getCssDimensions(element) {
    const styles = getComputedStyle(element);
    const width = parseInt(styles.width, 10);
    const height = parseInt(styles.height, 10);
    return {width, height};
  }
  const {width: frameBufferWidth, height: frameBufferHeight} =
      getCssDimensions(view);

  // Apply the measured dimensions back onto the element (optional).
  view.style.width = `${frameBufferWidth}px`;
  view.style.height = `${frameBufferHeight}px`;

  // Log dimensions for debugging.
  console.log('Width:', frameBufferWidth, 'Height:', frameBufferHeight);

  // Mouse mode: default to "fps" if not set in the URL.
  const mouseMode = params.get('mouseMode') || 'fps';

  // No downscale factor specified, estimate it from the quality setting.
  let stepSizeVisibilityDelay = 0.99;
  if (!params.get('downscale') && quality) {
    let maxPixelsPerFrame = frameBufferWidth * frameBufferHeight;
    if (quality == 'phone') {  // For iPhones.
      maxPixelsPerFrame = 300 * 450;
      stepSizeVisibilityDelay = 0.8;
    } else if (quality == 'low') {  // For laptops with integrated GPUs.
      maxPixelsPerFrame = 600 * 250;
      stepSizeVisibilityDelay = 0.8;
    } else if (quality == 'medium') {  // For laptops with discrete GPUs.
      maxPixelsPerFrame = 1200 * 640;
      stepSizeVisibilityDelay = 0.95;
    }  // else assume quality is 'high' and render at full res.
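    // Increase the downscaling factor until the rendered pixel count fits
    // within the quality budget chosen above.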
    while (frameBufferWidth * frameBufferHeight / lowResFactor >
           maxPixelsPerFrame) {
      lowResFactor++;
    }
    console.log('Automatically chose a downscaling factor of ' + lowResFactor);
  }
  overrideParams['useLargerStepsWhenOccluded'] = false;
  overrideParams['step_size_visibility_delay'] = stepSizeVisibilityDelay;

  // Near plane distance in world coordinates.
  const nearPlane = parseFloat(params.get('near') || 0.01);

  // FOV along screen height. Specified in degrees.
  const vfovy = parseFloat(params.get('vfovy') || 40.0);

  const viewSpace = document.querySelector('.viewspace');
  viewSpace.textContent = '';
  viewSpace.appendChild(view);

  // Create the canvas and attach it to the .view container.
  let canvas = document.createElement('canvas');
  view.appendChild(canvas);

  // Apply the parent's (.view) styles to the canvas via JavaScript.
  canvas.style.width = '100%';
  canvas.style.height = '100%';
  canvas.style.border = getComputedStyle(view).border;
  canvas.style.borderRadius = getComputedStyle(view).borderRadius;
  canvas.style.boxSizing = getComputedStyle(view).boxSizing;

  // Add tool for visualizing framerate.
  gStats = Stats();
  gStats.dom.style.position = 'absolute';
  gStats.dom.style.display = 'none';  // Hide the stats overlay entirely.
  viewSpace.appendChild(gStats.dom);

  gSubmodelPanel = gStats.addPanel(new Stats.Panel('SM', '#0ff', '#002'));
  gSubmodelPanel.update(getActiveSubmodelIndex());
  gVMemPanel = gStats.addPanel(new Stats.Panel('MB VRAM', '#0ff', '#002'));
  gVMemPanel.update(0);

  // Show FPS; hide other panels.
  gStats.showPanel(0);

  // Set up a high performance WebGL context, making sure that anti-aliasing is
  // turned off.
  let gl = canvas.getContext('webgl2', {
    powerPreference: 'high-performance',
    alpha: false,
    stencil: true,
    precision: 'highp',
    depth: true,
    antialias: false,
    desynchronized: false,
    preserveDrawingBuffer:
        benchmarkParam && benchmarkParam.toLowerCase() === 'quality',
  });
  gl.enable(gl.DEPTH_TEST);  // Enable depth testing.
  gl.depthFunc(gl.LEQUAL);   // Specify how depth comparisons are performed.
  gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
  gl.viewport(0, 0, canvas.width, canvas.height);

  gRenderer = new THREE.WebGLRenderer({
    canvas: canvas,
    context: gl,
  });

  // Set up the normal scene used for rendering.
  gCamera = new THREE.PerspectiveCamera(
      vfovy,  // Vertical field of view, in degrees.
      Math.trunc(view.offsetWidth / lowResFactor) /
          Math.trunc(view.offsetHeight / lowResFactor),  // Aspect ratio.
      nearPlane,  // Near clipping plane.
      2000);      // Far clipping plane (increased from 100.0).
  gCamera.updateProjectionMatrix();

  // Expose gCamera to the global scope.
  window.sceneCamera = gCamera;

  // Set up progressive rendering and the renderer size.
  setupProgressiveRendering(view, lowResFactor);
  gRenderer.autoClear = false;
  gRenderer.setSize(view.offsetWidth, view.offsetHeight);
  gRenderer.setClearColor(0x000000, 1);  // Clear to black.

  setupCameraControls(mouseMode, view);  // Defaults to 'fps' if unspecified.

  let width = Math.trunc(view.offsetWidth / lowResFactor);
  let height = Math.trunc(view.offsetHeight / lowResFactor);
  setupViewport(width, height);

  loadScene(dirUrl, overrideParams);
}

/**
 * The main update function that gets called every frame.
 *
 * @param {number} t The timestamp (ms) passed by requestAnimationFrame.
 */
function renderNextFrame(t) {
  // Delete old submodels to keep memory usage in check.
  garbageCollectSubmodelPayloads();

  // Attempt to set the current ray march scene. This will kick off the process
  // of instantiating a new scene if necessary.
  let submodelIndex =
      positionToSubmodel(gCamera.position, getActiveSubmodelContent().params);
  setCurrentRayMarchScene(submodelIndex);

  // setCurrentRayMarchScene() may not actually change the scene. Use the
  // index of the current active submodel instead.
  submodelIndex = getActiveSubmodelIndex();

  let sceneParams = getSubmodelContent(submodelIndex).params;

  for (let i = 0; i < gFrameMult; ++i) {
    gSubmodelTransform = submodelTransform(submodelIndex, sceneParams);

    gSubmodelPanel.update(submodelIndex);
    gVMemPanel.update(getCurrentTextureUsageInBytes() / 1e6);

    // Only update the camera controls when hovering hasn't disabled them.
    if (!disableCameraControls) {
      updateCameraControls();
    }

    // For benchmarking, we want to directly set the projection matrix.
    if (!gBenchmark) {
      gCamera.updateProjectionMatrix();
    }
    gCamera.updateMatrixWorld();

    const currentSubmodelCenter = submodelCenter(submodelIndex, sceneParams);
    const submodelScale = getSubmodelScale(submodelIndex);
    let submodelCameraPosition = new THREE.Vector3().copy(gCamera.position);
    submodelCameraPosition.sub(currentSubmodelCenter);
    submodelCameraPosition.multiplyScalar(submodelScale);

    let shaderUniforms = getRayMarchScene().children[0].material.uniforms;

    // Make sure to free up GPU memory from the previous frames.
    if (!!shaderUniforms['weightsZero']['value']) {
      shaderUniforms['weightsZero']['value'].dispose();
    }
    if (!!shaderUniforms['weightsOne']['value']) {
      shaderUniforms['weightsOne']['value'].dispose();
    }
    if (!!shaderUniforms['weightsTwo']['value']) {
      shaderUniforms['weightsTwo']['value'].dispose();
    }

    shaderUniforms['bias_0']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 0, submodelCameraPosition);
    shaderUniforms['bias_1']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 1, submodelCameraPosition);
    shaderUniforms['bias_2']['value'] =
        trilerpDeferredMlpBiases(submodelIndex, 2, submodelCameraPosition);

    shaderUniforms['weightsZero']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 0, submodelCameraPosition);
    shaderUniforms['weightsOne']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 1, submodelCameraPosition);
    shaderUniforms['weightsTwo']['value'] =
        trilerpDeferredMlpKernel(submodelIndex, 2, submodelCameraPosition);

    gRenderer.clear();  // Clear color and depth buffers to avoid conflicts.
    renderProgressively();
  }
  gStats.update();

  // By default we schedule the next frame ASAP, but the benchmark mode can
  // override this by replacing this lambda.
  let scheduleNextFrame = () => {
    requestAnimationFrame(renderNextFrame);
  };
  if (gBenchmark) {
    scheduleNextFrame = benchmarkPerformance(scheduleNextFrame);
  }
  scheduleNextFrame();
}

/**
 * Starts the volumetric scene viewer application.
 */
function start() {
  initFromParameters();
  addHandlers();
  // The camera and renderer are created in initFromParameters(); bail out if
  // they failed to initialize.
  if (!gCamera || !gRenderer) {
    console.error('gCamera or gRenderer was not initialized.');
    return;
  }
}

document.addEventListener('DOMContentLoaded', start);
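// Example usage (a hypothetical deployment sketch, not part of the viewer):
// serve this script next to a scene directory and open the hosting page with
// query parameters such as
//
//   index.html?quality=medium&mouseMode=orbit&stepMult=2&vfovy=40
//
// All parameter names above are parsed in initFromParameters(). The hosting
// page must provide the #viewspacecontainer and .viewspace elements and a
// #shader-editor textarea, plus the #benchmark-stats element when
// benchmarking.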