jmprzz
jmprzz
DIAdiscord.js - Imagine an app
Created by jmprzz on 8/14/2024 in #djs-voice
How to properly play audio/webm;codecs=opus buffers to a voice channel sent via WebSocket?
Trying to broadcast a stream of audio buffers being recorded as audio/webm;codecs=opus using MediaRecorder to a voice channel via a WebSocket, I know that the recording works because i can save the recording to a file and it plays in the browser. Here's my code:
// join channel if name contains radio
if (newVC?.name.toLocaleLowerCase().includes("radio")) {
if (newVC.members.size >= 1) {
// defining the voice connection
const connection = joinVoiceChannel({
channelId: newVC.id,
guildId: newVC.guild.id,
adapterCreator: newVC.guild.voiceAdapterCreator,
selfMute: false,
debug: true,
});

const audioPlayer = createAudioPlayer();
// websocket receiveing buffer as audio/webm;codecs=opus
// listener is a just custom subprotocol to broadcast audio to this client.
const ws = new WebSocket("ws://localhost:3000", "listener");


ws.addEventListener("open", () => {
console.log("Connection opened");
});

connection.subscribe(audioPlayer);

let stream = new Readable({
objectMode: true,
highWaterMark: 1024 * 1024 * 20,
read() {},
});

connection.on(VoiceConnectionStatus.Ready, () => {
ws.addEventListener("message", (event) => {
// pushing buffer to the stream
stream.push(event.data);

});
// creating resouce with stream
const audio = createAudioResource(stream);

// playing the buffers pushed to the stream
audioPlayer.play(audio);

audioPlayer.on("error", console.error);
audioPlayer.on("debug", console.log);

connection.on(VoiceConnectionStatus.Destroyed, async () => {
stream.destroy();
ws.close();
});
});
} // endif
} // endif
// join channel if name contains radio
if (newVC?.name.toLocaleLowerCase().includes("radio")) {
if (newVC.members.size >= 1) {
// defining the voice connection
const connection = joinVoiceChannel({
channelId: newVC.id,
guildId: newVC.guild.id,
adapterCreator: newVC.guild.voiceAdapterCreator,
selfMute: false,
debug: true,
});

const audioPlayer = createAudioPlayer();
// websocket receiveing buffer as audio/webm;codecs=opus
// listener is a just custom subprotocol to broadcast audio to this client.
const ws = new WebSocket("ws://localhost:3000", "listener");


ws.addEventListener("open", () => {
console.log("Connection opened");
});

connection.subscribe(audioPlayer);

let stream = new Readable({
objectMode: true,
highWaterMark: 1024 * 1024 * 20,
read() {},
});

connection.on(VoiceConnectionStatus.Ready, () => {
ws.addEventListener("message", (event) => {
// pushing buffer to the stream
stream.push(event.data);

});
// creating resouce with stream
const audio = createAudioResource(stream);

// playing the buffers pushed to the stream
audioPlayer.play(audio);

audioPlayer.on("error", console.error);
audioPlayer.on("debug", console.log);

connection.on(VoiceConnectionStatus.Destroyed, async () => {
stream.destroy();
ws.close();
});
});
} // endif
} // endif
11 replies