jgalazm / Nami

0 stars 1 forks source link

Large bathymetries won't load #9

Open jgalazm opened 6 years ago

jgalazm commented 6 years ago

From @jgalazm on March 20, 2018 3:46

On #132 when loading a bathymetry of 4801 x 4201 cells I get this error:

TypeError: Cannot read property 'length' of undefined namigl.js:1378 at createTextureFromMatrix (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:1378:24) at start (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:1751:31) at new NAMI.Model (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:1773:6) at init (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:72:23) at fileString (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:179:26) at XMLHttpRequest.req.onreadystatechange (/home/tsunamilab/github/tsunami-lab/namigl/src/namigl.js:20:17) Then I modified the getFile function to print the length of the response as it is loaded


    // Fetch a text file via XHR and pass its full contents to `callback`.
    // Logs the partial response length while loading (readyState 3) to help
    // diagnose truncated downloads of very large files.
    let getFile = function(url, callback){
        let req = new XMLHttpRequest();
        req.open('GET', url, true);
        req.onreadystatechange = function (aEvt) {
          if (req.readyState === 3) {
            // LOADING: log how much text has arrived so far.
            console.log(req.responseText.length);
          }
          if (req.readyState === 4) {
            if (req.status === 200) {
              callback(req.responseText);
            } else {
              console.log("Error loading file \n"+url);
            }
          }
        };
        req.send(null);
    }

and the result was this: 2768 6275072 13451264 18432000 25837568 36569088 52002816 59260928 73940992 92225536 101384192 106332160 115343360 124551168 128876544 133922816 138838016 143818752 148832256 153911296 159023104 164036608 168607744 173588480 177766400 182616064 187334656 192184320 197033984 201883648 206143488 211451904 215908352 220102656 224559104 229277696 234127360 237928448 242909184 247627776 251822080 256802816 261849088 266895360 0 0 0 0 0 0 0 0 0 0 0 0 0 (etc)

So for some reason it drops the response once it is too big.

A similar error appeared here, I will try to follow:

https://stackoverflow.com/questions/37402716/handle-xmlhttprequest-response-with-large-data

Copied from original issue: Inria-Chile/tsunami-lab#146

jgalazm commented 6 years ago

The answer suggests to use fetch instead of xmlhttprequest: https://blogs.windows.com/msedgedev/2016/05/24/fetch-and-xhr-limitations/

jgalazm commented 6 years ago

Before using fetch I'm checking xhr with blobs:

    // Fetch `url` as a Blob, then stream-decode it to text with a FileReader,
    // logging progress so we can see where (or whether) the payload truncates.
    // On success the decoded text is handed to `callback`.
    let getFile = function(url, callback){
        var xhr = new XMLHttpRequest();
        xhr.open('GET', url, true);
        xhr.responseType = "blob";

        xhr.onreadystatechange = function (aEvt) {
          if (xhr.readyState === 4) {
            if (xhr.status === 200) {
              console.log(xhr.response.size);
              let myReader = new FileReader();

              // e.srcElement is deprecated; e.target is the standard property.
              myReader.addEventListener("progress", function(e){
                // During "progress" the partial result can still be null on
                // some implementations — guard before reading .length.
                if (e.target.result) {
                  console.log(e.target.result.length);
                }
              });

              myReader.addEventListener("loadend", function(e){
                console.log(e.target.result.length);
                // The original snippet never invoked `callback`; deliver the
                // decoded text here so the function honors its contract.
                callback(e.target.result);
              });
              myReader.readAsText(xhr.response);
            } else {
              console.log("Error loading file \n"+url);
            }
          }
        };
        xhr.send(null);
    }

Here are events for FileReader

https://www.nczonline.net/blog/2012/05/22/working-with-files-in-javascript-part-3/

This solution was very slow, and then returned zero-size blobs once the payload exceeded roughly 200 million (presumably characters or bytes).

jgalazm commented 6 years ago

A solution for this problem was to store the array in binary form and read it using blobs into an arrayBuffer


    // Fetch a binary file and hand its contents to `callback` as a
    // Float64Array. Using responseType "arraybuffer" avoids the
    // responseText truncation seen with very large ASCII downloads.
    // `format` is kept for interface compatibility but is currently
    // unused — the payload is always interpreted as raw float64 data.
    let getArrayFromFile = function(url, callback, format='ascii'){
        var xhr = new XMLHttpRequest();
        xhr.open("GET", url, true);
        xhr.responseType = "arraybuffer";

        xhr.onload = function(oEvent) {
            // xhr.response is already an ArrayBuffer here; the previous
            // Blob + FileReader round-trip copied the data needlessly.
            callback(new Float64Array(xhr.response));
        };

        xhr.onerror = function() {
            console.log("Error loading file \n"+url);
        };

        xhr.send();
    }
jgalazm commented 6 years ago

This solution works for reading but now the browser crashes with the "4K" bathymetry :( ... maybe large files will only work in Node

jgalazm commented 6 years ago

When loading the Etopo 2' bathymetry the code crashes when creating the "raveledMatrix" here:

        // Build an RGBA float texture from a 2-D matrix: the matrix value goes
        // in the R channel, G/B are 0 and A is 1.
        // Preallocating a Float32Array instead of push()-ing onto a plain JS
        // array avoids the enormous boxed-number allocation (width*height*4
        // elements) that crashed the browser on large (e.g. 4801x4201)
        // bathymetries. gl.FLOAT textures take Float32Array data directly.
        let createTextureFromMatrix = function(matrix,textureId){

            let internalFormat = isWebGL2? gl.RGBA32F : gl.RGBA;   
            let format = gl.RGBA;
            let type = gl.FLOAT;

            const height = matrix.length;
            // Was matrix[1].length in the texture call below — use row 0
            // consistently with the ravel loop (assumes a rectangular matrix).
            const width = matrix[0].length;

            // One contiguous buffer, 4 floats (RGBA) per cell.
            let raveledMatrix = new Float32Array(width * height * 4);
            let k = 0;
            for(let j = 0; j < height; j++){
                const row = matrix[j];
                for(let i = 0; i < width; i++){
                    raveledMatrix[k++] = row[i]; // R: matrix value
                    raveledMatrix[k++] = 0;      // G
                    raveledMatrix[k++] = 0;      // B
                    raveledMatrix[k++] = 1;      // A
                }
            }

            let texture = createTextureFromData( 
                width, 
                height,
                raveledMatrix,
                textureId, internalFormat, format, type );

            return texture;    
        };

Maybe this can be done more efficiently?