How to Set Custom HTML Structure for Multi-Page App in Vite?

I am trying to implement a multi-page application using Vite and vanilla JavaScript. My project structure looks like this:

src/
│── assets/
│── pages/
│   ├── home/
│   │   ├── index.html
│   │   └── styles.css
│   └── contact/
│       ├── index.html
│       └── styles.css

However, I’m having issues with build.rollupOptions.input in Vite’s configuration. It seems Vite expects a structure like this:

src/
│── assets/
│── index.html
│── contact.html

Here is my Vite configuration:

import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { defineConfig } from "vite";

// Absolute path of the directory containing this config file.
const __dirname = dirname(fileURLToPath(import.meta.url));

// Bundle name -> entry HTML file, relative to the repository root.
// Each entry becomes its own page in the multi-page build.
const pages = {
  main: "src/pages/home/index.html",
  contact: "src/pages/contact/index.html",
};

export default defineConfig({
  // Serve/build from src, so dev URLs are /pages/<name>/index.html.
  root: "src",
  server: {
    port: 3000,
  },
  build: {
    // outDir is resolved against `root`, hence the "../" hop.
    outDir: "../dist",
    rollupOptions: {
      // Resolve every page entry to an absolute path for Rollup.
      input: Object.fromEntries(
        Object.entries(pages).map(([name, page]) => [
          name,
          resolve(__dirname, page),
        ])
      ),
    },
  },
});

Problems:

  • I can’t specify the entry HTML files in my custom structure.
  • I can’t set a custom folder structure for my HTML files.

How can I configure Vite to work with this custom structure?

Making WebRTC as fast as possible

Below is my Node.js WebRTC server and I'm wondering how I can make it as fast as possible. Things I've done so far: in the OBS virtual camera I lowered the resolution as much as possible and lowered the framerate to 30. The server is run with Node.js and I'm using a TURN server for clients behind NATs.

server.js

const express = require("express");
const http = require("http");
const socketIo = require("socket.io");

const app = express();
const server = http.createServer(app);
const io = socketIo(server);

// socket.id of the active broadcaster; undefined until one registers.
let broadcaster;
const port = 4000;

io.sockets.on("error", (e) => console.log(e));
io.sockets.on("connection", (socket) => {
  console.log("A user connected:", socket.id, socket.handshake.address);

  // A client declares itself the broadcaster; notify everyone else so
  // already-connected watchers can (re)announce themselves.
  socket.on("broadcaster", () => {
    broadcaster = socket.id;
    socket.broadcast.emit("broadcaster");
    console.log(socket.id, "is broadcasting");
  });

  // A client wants to watch: relay its id to the broadcaster, which
  // responds with an SDP offer.
  socket.on("watcher", () => {
    console.log(socket.id, "is watching");
    // Fix: don't emit to an undefined room when no broadcaster has
    // registered yet.
    if (broadcaster) {
      socket.to(broadcaster).emit("watcher", socket.id);
    }
  });

  // Pure signaling relays: forward offer/answer/ICE candidates
  // between the two peers identified by socket id.
  socket.on("offer", (id, message) => {
    socket.to(id).emit("offer", socket.id, message);
    console.log(socket.id, "sent an offer to", id);
  });

  socket.on("answer", (id, message) => {
    socket.to(id).emit("answer", socket.id, message);
    console.log(socket.id, "sent an answer to", id);
  });

  socket.on("candidate", (id, message) => {
    socket.to(id).emit("candidate", socket.id, message);
    console.log(socket.id, "sent a candidate to", id);
  });

  socket.on("disconnect", () => {
    console.log("A user disconnected:", socket.id);
    // Fix: clear the stale id when the broadcaster itself leaves,
    // and only notify the broadcaster if one is still registered.
    if (socket.id === broadcaster) {
      broadcaster = undefined;
    } else if (broadcaster) {
      socket.to(broadcaster).emit("disconnectPeer", socket.id);
    }
  });
});

server.listen(port, "0.0.0.0", () =>
  console.log(`Server is running on http://0.0.0.0:${port}`)
);

broadcast.html

<!DOCTYPE html>
<html>
<head>
    <title>Broadcaster</title>
    <meta charset="UTF-8" />
</head>
<body>
    <!-- Local preview of the outgoing camera stream. -->
    <video playsinline autoplay muted></video>
    <script src="https://cdn.jsdelivr.net/npm/socket.io-client@4/dist/socket.io.js"></script>
    <script>
        // Broadcaster page: captures the camera and opens one
        // RTCPeerConnection per watcher, keyed by the watcher's socket id.
        const peerConnections = {};
        const config = {
          iceServers: [
              // NOTE(review): empty — without at least a STUN server (and a
              // TURN server for peers behind NAT, as the question mentions)
              // connections will generally only succeed on the local network.
              
          ],
        };

        // NOTE(review): 'http://:4000' has no hostname — presumably redacted;
        // the signaling server's host must go here.
        const socket = io.connect('http://:4000');

        // The watcher accepted our offer — complete the handshake.
        socket.on("answer", (id, description) => {
            peerConnections[id].setRemoteDescription(description);
        });

        // A new watcher arrived: create a dedicated connection, attach the
        // local tracks and send it an SDP offer.
        socket.on("watcher", id => {
            const peerConnection = new RTCPeerConnection(config);
            peerConnections[id] = peerConnection;

            // NOTE(review): videoElement is declared later in this script;
            // that works because this handler only fires asynchronously, but
            // srcObject may still be null if a watcher connects before
            // getUserMedia below has resolved — TODO confirm ordering.
            let stream = videoElement.srcObject;
            stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));

            // Trickle ICE: forward each local candidate to this watcher.
            peerConnection.onicecandidate = event => {
                if (event.candidate) {
                    socket.emit("candidate", id, event.candidate);
                }
            };

            peerConnection
                .createOffer()
                .then(sdp => peerConnection.setLocalDescription(sdp))
                .then(() => {
                    socket.emit("offer", id, peerConnection.localDescription);
                });
        });

        // Remote ICE candidate relayed from a watcher.
        socket.on("candidate", (id, candidate) => {
            peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
        });

        // A watcher left: tear down and forget its connection.
        socket.on("disconnectPeer", id => {
            peerConnections[id].close();
            delete peerConnections[id];
        });

        // Close the signaling socket when the page goes away.
        window.onunload = window.onbeforeunload = () => {
            socket.close();
        };

        // Get camera stream
        const videoElement = document.querySelector("video");
        navigator.mediaDevices.getUserMedia({ video: true })
            .then(stream => {
                videoElement.srcObject = stream;
                socket.emit("broadcaster");
            })
            .catch(error => console.error("Error: ", error));
    </script>
</body>
</html>

watch.html

<!DOCTYPE html>
<html>
<head>
    <title>Watcher</title>
    <meta charset="UTF-8" />
</head>
<body>
    <!-- Displays the incoming broadcast stream. -->
    <video playsinline autoplay muted></video>
    <script src="https://cdn.jsdelivr.net/npm/socket.io-client@4/dist/socket.io.js"></script>
    <script>
        // Watcher page.
        //
        // Fix: the original file was a copy of the broadcaster page (it
        // handled "watcher"/"answer", captured the camera and emitted
        // "broadcaster"), so two broadcasters existed and no stream was
        // ever received. It also had an array elision `[ , ]` in
        // iceServers. This is the matching watcher side of the server's
        // protocol: announce "watcher", answer the broadcaster's offer,
        // and render the incoming track.
        let peerConnection;
        const config = {
          iceServers: [
              // NOTE(review): add STUN/TURN servers here, e.g.
              // { urls: "stun:stun.l.google.com:19302" }
          ],
        };

        // NOTE(review): 'http://:4000' has no hostname — presumably
        // redacted; fill in the signaling server's host.
        const socket = io.connect('http://:4000');

        const videoElement = document.querySelector("video");

        // The broadcaster sent us an SDP offer (relayed by the server):
        // build the peer connection and reply with an answer.
        socket.on("offer", (id, description) => {
            peerConnection = new RTCPeerConnection(config);
            peerConnection
                .setRemoteDescription(description)
                .then(() => peerConnection.createAnswer())
                .then(sdp => peerConnection.setLocalDescription(sdp))
                .then(() => {
                    socket.emit("answer", id, peerConnection.localDescription);
                });

            // Render the broadcaster's media as soon as a track arrives.
            peerConnection.ontrack = event => {
                videoElement.srcObject = event.streams[0];
            };

            // Trickle ICE: forward each local candidate to the broadcaster.
            peerConnection.onicecandidate = event => {
                if (event.candidate) {
                    socket.emit("candidate", id, event.candidate);
                }
            };
        });

        // Remote ICE candidate relayed from the broadcaster.
        socket.on("candidate", (id, candidate) => {
            peerConnection
                .addIceCandidate(new RTCIceCandidate(candidate))
                .catch(e => console.error(e));
        });

        // Announce ourselves on connect, and again whenever a (new)
        // broadcaster registers.
        socket.on("connect", () => {
            socket.emit("watcher");
        });
        socket.on("broadcaster", () => {
            socket.emit("watcher");
        });

        // Close the signaling socket when the page goes away.
        window.onunload = window.onbeforeunload = () => {
            socket.close();
        };
    </script>
</body>
</html>

How to implement color/luminosity blendmodes in webgl?

I am currently using a WebGL library and it allows blend modes such as normal, erase, add, multiply, subtract, etc. Those use the blendEquationSeparate, blendEquation, and blendFunc functions. I'd supply these values to the variables, for example, someObject.blendMode="add", and then the library will handle the rest.

However, I’d like to implement a color blendMode ala Photoshop, the type that is like luminosity blendMode with this algorithm:

float luminosity = 0.299 * color1.r + 0.587 * color1.g + 0.114 * color1.b;

But for color, the algorithm I got from https://www.w3.org/TR/compositing-1/#blending is

B(Cb, Cs) = SetLum(Cs, Lum(Cb))

After some digging, I did not stumble upon many promising solutions, but I did find some answers saying that I actually need to add custom shaders. Examples I found online did not work and even if they did I am unsure how to insert the custom shaders in, since the library already has a base fragment shader and vertex shader that it uses. My understanding of webgl is limited, though I am learning.

Search by first character of a word from TD of a HTML Table

My table contains many columns and numerous entries. I am filtering the table with the help of a code, but the filter matches any searched word containing those characters anywhere in it. For example: if I enter 'ab' in the textbox it filters the table records to strings containing the keyword 'ab', like abcd, babe, cfab, dkabh etc.
But my requirement is that when I enter the keyword 'ab' in the textbox it should search only those strings which start with 'ab', like abcd, abdc etc. Can someone amend the following code? Thanks...

// Filters the rows of #myTable to those whose FIRST cell starts with
// the text typed into #myInput (case-insensitive). Rows without a
// <td> (e.g. a header row of <th>) are left untouched.
function myFunction() {
  var input, filter, table, tr, td, i, txtValue;
  input = document.getElementById("myInput");
  filter = input.value.toUpperCase();
  table = document.getElementById("myTable");
  tr = table.getElementsByTagName("tr");
  for (i = 0; i < tr.length; i++) {
    td = tr[i].getElementsByTagName("td")[0];
    if (td) {
      txtValue = td.textContent || td.innerText;
      // Fix: indexOf(filter) > -1 matched the keyword ANYWHERE in the
      // string ("babe", "cfab"...). startsWith restricts the match to
      // the beginning; trim() ignores leading markup whitespace.
      if (txtValue.trim().toUpperCase().startsWith(filter)) {
        tr[i].style.display = "";
      } else {
        tr[i].style.display = "none";
      }
    }
  }
}

How to tell Firefox to hide the element identified by “CustomizableUI.AREA_BOOKMARKS”?

On MDN, it states here that the element ID for the Bookmarks sidebar is reported by

  • CustomizableUI.AREA_BOOKMARKS

Can someone please tell me how I can use that to force Firefox to hide the Bookmarks sidebar? I don’t want a “toggle” function because, I need to ignore whichever state it might be in, at the time of the JavaScript call from the loaded HTML page, and force a “hidden”, “collapsed” or “not-displayed” mode.

I tried the following as inline scripting, but it did not have the desired effect, likely because I don’t have the proper references to tell the browser properly.

<script>
    // NOTE(review): this likely cannot work from a normal web page:
    // `browser.*` APIs are only available to privileged extension code,
    // `getElementById` is a method of `document` (not `browser`), and
    // "CustomizableUI.AREA_BOOKMARKS" is the *name* of a constant in
    // Firefox's CustomizableUI module, not a DOM element id. Web content
    // has no access to the browser chrome (sidebars) at all — confirm
    // whether an extension context is available instead.
    var sidebar = browser.getElementById("CustomizableUI.AREA_BOOKMARKS") ;
    sidebar.hide() ;
</script>

Rotated, center aligned, multiline text in JsPDF

I’m using JsPDF to generate some documents.
I need a piece of text to be rotated 180.
It has a max width so it’s multiple lines.
It also needs to be center aligned.

Currently i’m doing this:

// Fix: jsPDF renders only the first line when `angle` is combined with
// `maxWidth`/`align` — the multiline layout path does not apply the
// rotation to the subsequent lines. Workaround: split the text
// ourselves and issue one rotated, center-aligned text() call per line.
// With a 180° rotation the text flows "upward", so each following line
// is placed at a smaller y.
const lines = doc.splitTextToSize(description, 80);
// Line height in document units (font size is in points; divide by the
// scale factor to convert) — TODO confirm against the document's unit.
const lineHeight =
  (doc.getFontSize() * doc.getLineHeightFactor()) / doc.internal.scaleFactor;
lines.forEach((line, i) => {
  doc.text(line, x, y - i * lineHeight, {
    angle: 180,
    align: 'center',
  });
});

The result is that only the first line of text gets rendered on the PDF. All the other lines don’t appear.

Does anyone know how to fix this?

How to handle extraction of zip files in my web application?

I am designing an app which will be processing data from a zip file uploaded by the user. I am wondering if it is OK to handle this without a backend, since I do not want to save the data. At this moment the plan is to extract the data and prepare output for the user. Do I really need a backend in this case, or will it be much more difficult without it?

NextJS Server Action cannot read data from exported list

I have a NextJS Project that uses instrumentations.ts to create a websocket server. I am trying to access the connected clients from a server action however, any exported variables are defaulted to null even after they have been modified in a function.

WebSocketServer Code:

export let wss: WebSocketServer;

export function createWS() {
    if(Boolean(wss)) return wss;
    wss = new WebSocketServer({ port: 7777 });

    wss.on('listening', () => {
        console.log('listening on port 7777');
    });

    wss.on('message', (message) => {
        console.log('message received');
        console.log(message);
    });

    wss.on('connection', connectedToWSS);

    return wss;
}

Server Action Code:

'use server';

import { wss } from "@/utils/websocket";

// Server action: logs the number of clients connected to the shared
// WebSocket server. `wss` is optional-chained because this module may
// be evaluated in a different module graph than the one in which
// createWS() assigned the exported binding — in that case `wss` is
// undefined here even though the server is running.
export async function testWebSocket() {
    console.log(wss?.clients.size);
}

Instrumentation Code:

import { createWS } from './utils/websocket';


// Next.js instrumentation hook: runs once when the server process
// starts. Used here purely for its side effect of starting the
// WebSocket server.
export async function register() {
    createWS();
}

I expect that the wss imported from the websocket file would exist after calling createWS from instrumentation, however wss only exists within the websocket file and I cannot import it without it being undefined.

HERE Maps API for JavaScript v2.5.4 returning 429 “Too Many” Requests – Version Retired?

We are using the HERE Maps API for JavaScript v2.5.4. It has been working flawlessly for years, until today (2/15/25). Today, it is returning 429 “Too Many Requests” messages for many (but not all) of our requests. Basically, we get the map pins we requested on a plain blue background.

It looks to us like our account with HERE is current, but we ARE using a much earlier version of the API. We are using the following to connect:

We are using an App Key and an App Code to connect to the API. Back in v2.5.4, we didn't use an API Key, but we have created one, and tried to add it to the parameters we use to connect. No luck.

Has the version of the API we’re trying to use stopped working? Or do we need to include another setting with our API Key? Or is something wrong with our account?

Replace words and the curly braces

If we have a sentence such as :

Greetings dear {{name}}

What we need to do is: if either one of the curly braces is deleted, then the whole object, i.e. {{name}}, should be deleted along with any spaces surrounding the object.

We have looked at several regex constructs such as :

s.replace(/\s?{+[^}]+}+/g, '');

But they do not do what we need.
Any assistance would be greatly appreciated.

Capturing audio with JavaScript and play it right away throws error at audio

I have a JavaScript file which should capture audio and then save it. When I started to work on saving it to the server, the first file was fine but all after were corrupted. So I decided to check in the browser what I’m sending. Then I figured out the same thing: the first audio clip is fine, but it kinda looks like that the second file is getting corrupted. I decided to check on another device to check if my laptop could cause the problem, but same issue. The error which im getting is super unclear for me: NotSupportedError: Failed to load because no supported source was found.. I used Chrome and Safari but both browsers and both laptops throw the same error.

Error from the console:

untitled.html:40 Blob {size: 81199, type: 'audio/webm'}
untitled.html:46 Playback failed: NotSupportedError: Failed to load because no supported source was found.
(anonymous) @ untitled.html:46
Promise.catch
playAudio @ untitled.html:46
mediaRecorder.ondataavailable @ untitled.html:27Understand this errorAI
untitled.html:40 Blob {size: 81536, type: 'audio/webm'}
untitled.html:46 Playback failed: NotSupportedError: Failed to load because no supported source was found.

HTML (to test, save it as a .html)

<html>
<head>
  <title>Example</title>
</head>
<body>
<script>
  // Records the microphone and plays back each captured clip.
  //
  // Fix for the reported NotSupportedError: mediaRecorder.start(5000)
  // with a timeslice emits *fragments* of ONE continuous WebM stream —
  // only the first fragment contains the WebM initialization segment
  // (header), so every later chunk is unplayable on its own. To get
  // independent, playable clips, record without a timeslice and
  // stop()/start() the recorder on a timer: stop() flushes a complete,
  // self-contained file to ondataavailable.

  let audioStream;
  let mediaRecorder;


  navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  })
    .then(stream => {
      // Keep only the audio tracks for recording.
      audioStream = new MediaStream(stream.getAudioTracks());
      mediaRecorder = new MediaRecorder(audioStream, {
        mimeType: "audio/webm"
      });

      mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) {
          playAudio(event.data);
        }
      };

      mediaRecorder.start();
      // Every 5 s, finish the current clip (a complete file with its
      // own header) and immediately begin recording the next one.
      setInterval(() => {
        mediaRecorder.stop();
        mediaRecorder.start();
      }, 5000);
    })
    .catch(err => {
      console.error(err);
    });

  // Wraps an audio chunk in a Blob, attaches it to a new <audio>
  // element on the page and starts playback.
  const playAudio = (audioChunk) => {
    const audioBlob = new Blob([audioChunk], { type: "audio/webm" }); // Ensure correct MIME type
    const audioUrl = URL.createObjectURL(audioBlob);
    console.log(audioBlob);

    const audioElement = document.createElement("audio");
    audioElement.src = audioUrl;
    audioElement.controls = true;
    document.body.appendChild(audioElement);
    audioElement.play().catch((e) => console.error("Playback failed:", e));
  };
</script>
</body>
</html>

Developing encryption/decryption app using Blazor [closed]

I want to create an encryption/decryption application, that does client-side encryption. I want that all the encryption thing to happen on the client side, since I understood that this is more secure and reliable.

But can I do this only with Blazor and C#, without Javascript? And if yes, what type of Blazor application is necessary? I am a little bit confused about the options Visual Studio has.

enter image description here