WebGL2 not loading PNG

So I have not been able to find any documentation on this problem. I have a 128×128 .png that WebGL2 is supposed to load as a texture. A solid blue placeholder block is drawn while the image is supposedly loading. The solid blue block shows up and no errors pop up, but the actual image never appears.
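
To rule out a bad file path, I also did a quick standalone check (outside of WebGL) to see whether the browser can fetch and decode the PNG at all. This is just a sketch of that check, using the same filename I pass to loadTexture:

var probe = new Image();
probe.addEventListener('load', function () {
    // If this fires, the file itself is reachable and decodable.
    console.log('probe loaded', probe.width, probe.height);
});
probe.addEventListener('error', function (e) {
    // If this fires instead, the path or the file itself is the problem.
    console.log('probe failed to load', e);
});
probe.src = "pixil-frame-0 (3).png";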

main.js:

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
import { TheShaders } from "./shaders.js";
import { initTextureBuffer2DVec2, isPowerOf2 } from "./texture.js";
import { loadTexture } from "./texture.js";
var shaders = new TheShaders();
var canvas = document.getElementById("canvaselement");
if (canvas === null)
    throw new Error("Could not find canvas element");
var gl = canvas.getContext("webgl2");
if (gl === null)
    throw new Error("Could not get WebGL context");
//triangle
var TestvertexShader = gl.createShader(gl.VERTEX_SHADER);
if (TestvertexShader === null)
    throw new Error("Could not establish vertex shader");
var TestvertexShaderCode = shaders.Testvertexshader;
// Step 3: Attach the shader code to the vertex shader
gl.shaderSource(TestvertexShader, TestvertexShaderCode);
// Step 4: Compile the vertex shader
gl.compileShader(TestvertexShader);
console.log(gl.getShaderInfoLog(TestvertexShader));
var TestfragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
if (TestfragmentShader === null)
    throw new Error("Could not establish fragment shader"); // handle possibility of null
// Step 2: Write the fragment shader code
var TestfragmentShaderCode = shaders.Testfragmentshader;
// Step 3: Attach the shader code to the fragment shader
gl.shaderSource(TestfragmentShader, TestfragmentShaderCode);
// Step 4: Compile the fragment shader
gl.compileShader(TestfragmentShader);
console.log(gl.getShaderInfoLog(TestfragmentShader));

var vertices = new Float32Array([0.5, -0.5, -0.5, -0.5, -0.5, 0.5, 0.5, 0.5]);
var indices = new Uint16Array([0, 1, 2, 0, 2, 3]);
var shaderProgram = gl.createProgram();
if (shaderProgram === null)
    throw new Error("Could not create shader program");
// Step 2: Attach the vertex and fragment shaders to the program
gl.attachShader(shaderProgram, TestvertexShader);
gl.attachShader(shaderProgram, TestfragmentShader);
gl.linkProgram(shaderProgram);
// Step 3: Activate the program as part of the rendering pipeline
if (gl.useProgram(shaderProgram) === null)
    throw new Error("shader program not able to use");
// Step 1: Initialize the array of vertices for our triangle
// Step 2: Create a new buffer object
var vertex_buffer = gl.createBuffer();
// Step 3: Bind the object to `gl.ARRAY_BUFFER`
gl.bindBuffer(gl.ARRAY_BUFFER, vertex_buffer);
// Step 4: Pass the array of vertices to `gl.ARRAY_BUFFER
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
// Create and bind the element buffer
var indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
initTextureBuffer2DVec2(gl, vertices, shaderProgram, "a_texcoord");
var Testtexture = loadTexture(gl, "pixil-frame-0 (3).png");
if(Testtexture === null){
    console.log("haioshdioas");
}
else{
    console.log("ok");
    console.log(Testtexture);
}

gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
// Tell WebGL we want to affect texture unit 0
gl.activeTexture(gl.TEXTURE0);
// Bind the texture to texture unit 0
gl.bindTexture(gl.TEXTURE_2D, Testtexture);
var TestuSampler = gl.getUniformLocation(shaderProgram, "u_texture");
// Tell the shader we bound the texture to texture unit 0
gl.uniform1i(TestuSampler, 0);
// Step 5: Get the location of the `coordinates` attribute of the vertex shader
var coordinates = gl.getAttribLocation(shaderProgram, "coordinates");
gl.vertexAttribPointer(coordinates, 2, gl.FLOAT, false, 8, 0);
// Step 6: Enable the attribute to receive vertices from the vertex buffer
gl.enableVertexAttribArray(coordinates);
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clearColor(0.0, 0.5, 0.5, 1.0);
gl.enable(gl.DEPTH_TEST);
if (gl.clear(gl.COLOR_BUFFER_BIT) === null)
    throw new Error("uncleared");
gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_SHORT, 0);
console.log(gl.getParameter(gl.MAX_VERTEX_ATTRIBS) - 1);

shaders.js:

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.TheShaders = void 0;
var TheShaders = /** @class */ (function () {
    function TheShaders() {
        this.Testvertexshader = "\n  attribute vec2 coordinates;\nattribute vec2 a_texcoord;\nvarying vec2 v_texcoord;\n\n  void main(void) {\n    gl_Position = vec4(coordinates, 0.0, 1.0);\n     // Pass the texcoord to the fragment shader.\n     v_texcoord = a_texcoord;\n  }";
        this.Testfragmentshader = "\n    uniform sampler2D u_texture;\nvarying highp vec2 v_texcoord;\n \nvoid main() {\n   gl_FragColor = texture2D(u_texture, v_texcoord);\n}";
    }
    return TheShaders;
}());
export{TheShaders};

texture.js:

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

export {initTextureBuffer2DVec2, isPowerOf2,loadTexture};
// Initialize a texture and load an image.
// When the image finished loading copy it into the texture.
//
function loadTexture(gl, url) {
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    // Because images have to be downloaded over the internet
    // they might take a moment until they are ready.
    // Until then put a single pixel in the texture so we can
    // use it immediately. When the image has finished downloading
    // we'll update the texture with the contents of the image.
    var level = 0;
    var internalFormat = gl.RGBA;
    var width = 1;
    var height = 1;
    var border = 0;
    var srcFormat = gl.RGBA;
    var srcType = gl.UNSIGNED_BYTE;
    var pixel = new Uint8Array([0, 1, 255, 255]); // opaque blue
    gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, srcFormat, srcType, pixel);
    var image = new Image();
    image.addEventListener('load', function() {
        image.src = url;
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, srcFormat, srcType, image);
        // WebGL1 has different requirements for power of 2 images
        // vs. non power of 2 images so check if the image is a
        // power of 2 in both dimensions.
        if (isPowerOf2(image.width) && isPowerOf2(image.height)) {
            // Yes, it's a power of 2. Generate mips.
            gl.generateMipmap(gl.TEXTURE_2D);
        }
        else {
            // No, it's not a power of 2. Turn off mips and set
            // wrapping to clamp to edge
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        }
        console.log(gl.getError());
    });
    return texture;
}
function isPowerOf2(value) {
    return (value & (value - 1)) === 0;
}
function initTextureBuffer2DVec2(gl, textureCoordinates, shaderProgram, Attribname) {
    var textureCoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, textureCoordBuffer);
    var texcoordLocation = gl.getAttribLocation(shaderProgram, Attribname);
    console.log(texcoordLocation);
    gl.vertexAttribPointer(texcoordLocation, 2, gl.FLOAT, false, 0, 0);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates), gl.STATIC_DRAW);
    gl.enableVertexAttribArray(texcoordLocation);
    return textureCoordBuffer;
}

I tried changing the image from 100×100 to the 128×128 mentioned above. I searched for a long time for anything resembling this problem and nothing helped. I also tried changing the shader code, but nothing worked.
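
One thing I'm unsure about: drawElements runs once, synchronously, before any image callback could possibly have fired, so even if the texture were updated later nothing would be redrawn. A redraw hook along these lines is what I'd try next (untested sketch; redraw is a name I made up, and the extra callback argument to loadTexture is hypothetical, it would have to be invoked at the end of the 'load' handler after the texture is updated):

function redraw() {
    // Re-render with whatever is currently in the bound texture.
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    gl.drawElements(gl.TRIANGLES, indices.length, gl.UNSIGNED_SHORT, 0);
}

// Hypothetical change: loadTexture would take a callback and call it once
// the image has been copied into the texture.
var Testtexture = loadTexture(gl, "pixil-frame-0 (3).png", redraw);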