
Nikola Mitic

Making your CV talk 🤖 How to use Express.js routes to listen to a WebSocket API?

All you need to do is open a WebSocket connection to the WebSocket API inside an Express.js route. The streamAudioAnswer function does exactly that.

Here is the flow:

  1. The client requests the API path
  2. Express.js opens a WebSocket connection
  3. Express.js sends a message to the WebSocket API
  4. The WebSocket API responds
  5. Express.js takes the response and returns it to the client

🔗🔗🔗 For the full implementation click here
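
To make the flow concrete, here is a minimal sketch of how everything can sit inside an Express.js route. The route path /api/ask, the question query parameter, and the import path are assumptions for illustration (and it assumes ElevenLabs' default MP3 output), not part of the original project:

import express from "express";
// Assumed import path; streamAudioAnswer is the function shown later in this post.
import { streamAudioAnswer } from "./streamAudioAnswer";

const app = express();

// Hypothetical endpoint: GET /api/ask?question=... streams the spoken answer back.
app.get("/api/ask", (req, res) => {
  const question = String(req.query.question ?? "");

  // Announce a streamed MP3 payload to the client.
  res.setHeader("Content-Type", "audio/mpeg");

  streamAudioAnswer({
    question,
    onChunkReceived: (chunk) => res.write(Buffer.from(chunk, "base64")),
    onChunkFinal: () => res.end(),
    onError: () => res.end(),
    onClose: () => res.end(),
  });
});

app.listen(3000, () => console.log("Listening on http://localhost:3000"));

The usage example below fleshes out the same callbacks with proper error handling and logging.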

Example of streamAudioAnswer usage:

  streamAudioAnswer({
    question: question,
    onChunkReceived: (chunk) => {
      // ElevenLabs sends audio as base64 strings: decode and stream straight to the client.
      const buffer = Buffer.from(chunk, "base64");
      res.write(buffer);
    },
    onChunkFinal: () => {
      res.end();
    },
    onError: (error) => {
      console.error(`WebSocket Error: ${error}`);
      // Only send a 500 if nothing has been streamed yet; otherwise just end the response.
      if (!res.headersSent) {
        res.status(500).send(`WebSocket Error: ${error}`);
      } else {
        res.end();
      }
    },
    onClose: (event) => {
      if (event.wasClean) {
        console.info(
          `Connection closed cleanly, code=${event.code}, reason=${event.reason}`
        );
      } else {
        console.warn("Connection died");
      }
      res.end();
    },
  });
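
On the client side, the streamed response can be consumed like any other audio URL. A minimal browser-side sketch, assuming the hypothetical /api/ask route from above:

// Fetch the streamed audio from the (assumed) /api/ask route and play it in the browser.
async function askAndPlay(question: string): Promise<void> {
  const response = await fetch(`/api/ask?question=${encodeURIComponent(question)}`);

  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }

  // Buffer the streamed MP3 and hand it to an audio element.
  const audioBlob = await response.blob();
  const audio = new Audio(URL.createObjectURL(audioBlob));
  await audio.play();
}

askAndPlay("What is your current tech stack?").catch(console.error);

Note that response.blob() waits for the full stream before playback starts; for true progressive playback you would need something like MediaSource, which is outside the scope of this post.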

Function implementation:

// Assumes the "ws" package (npm i ws) for the Node.js WebSocket client and its event types.
import WebSocket, { type CloseEvent, type ErrorEvent } from "ws";

// getAnswerSource, getAnswerChunks and textChunker come from the rest of the
// project (see the full implementation linked above).

export const streamAudioAnswer = ({
  question,
  onChunkReceived,
  onChunkFinal,
  onError,
  onClose,
}: {
  question: string;
  onChunkReceived: (audioChunk: string) => void;
  onChunkFinal: () => void;
  onError: (error: ErrorEvent) => void;
  onClose: (event: CloseEvent) => void;
}) => {
  const voiceId = "IcOKBAbsVAB6WkEg78QO";
  const model = "eleven_turbo_v2";
  const wsUrl = `wss://api.elevenlabs.io/v1/text-to-speech/${voiceId}/stream-input?model_id=${model}`;
  const socket = new WebSocket(wsUrl);

  socket.onopen = async function (_event) {
    console.log("OPEN SOCKET");

    const answerSource = await getAnswerSource();
    const answerChunks = await getAnswerChunks(answerSource, question);

    // BOS ("beginning of stream") message: a single space opens the stream and
    // carries the voice settings plus the API key.
    const bosMessage = {
      text: " ",
      voice_settings: {
        stability: 0.5,
        similarity_boost: 0.5,
      },
      xi_api_key: process.env.ELEVEN_LABS_API_KEY,
    };

    socket.send(JSON.stringify(bosMessage));

    for await (const text of textChunker(answerChunks)) {
      socket.send(JSON.stringify({ text: text, try_trigger_generation: true }));
    }

    // EOS ("end of stream") message: an empty string tells the API no more text is coming.
    const eosMessage = {
      text: "",
    };

    socket.send(JSON.stringify(eosMessage));
  };

  socket.onmessage = function (event) {
    const response = JSON.parse(event.data.toString());

    // Audio arrives as base64-encoded chunks.
    if (response.audio) {
      onChunkReceived(response.audio);
    } else {
      console.log("No audio data in the response");
    }

    if (response.isFinal) {
      console.log("Audio stream chunks final");
      onChunkFinal();
    }
  };

  socket.onerror = onError;

  socket.onclose = onClose;
};
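
getAnswerSource, getAnswerChunks and textChunker live elsewhere in the project (see the full implementation linked above). As a rough idea of what textChunker needs to do, here is one possible sketch; it assumes the answer arrives as an async iterable of small text fragments (for example LLM tokens) and regroups them into chunks that end on natural pauses, which tends to produce smoother audio:

// One possible sketch of a text chunker (hypothetical, not the original implementation).
export async function* textChunker(
  chunks: AsyncIterable<string>
): AsyncGenerator<string> {
  const splitters = [".", ",", "?", "!", ";", ":", " "];
  let buffer = "";

  for await (const text of chunks) {
    buffer += text;

    // Flush whenever the buffer ends on a natural pause.
    if (splitters.some((s) => buffer.endsWith(s))) {
      yield buffer;
      buffer = "";
    }
  }

  // Flush whatever is left when the source runs dry.
  if (buffer.length > 0) {
    yield buffer;
  }
}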

❤️ If you would like to stay in touch, please feel free to connect ❤️

  1. X
  2. LinkedIn
  3. nikola.mitic.dev@gmail.com
