Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Initial implementation of audio input ring buffer. #268

Open
wants to merge 13 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 43 additions & 0 deletions example/interface.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,36 @@
import { Vircadia, DomainServer, Camera, AudioMixer, AvatarMixer, EntityServer, MessageMixer, Vec3, Uuid }
from "../dist/Vircadia.js";

// https://dbaron.org/log/20100309-faster-timeouts
(function () {
    const pending = [];
    const messageName = "zero-timeout-message";

    // Like setTimeout(fn, 0) but without the browser's minimum-delay
    // clamping on nested timeouts. No delay argument and no call
    // arguments are supported; capture any needed state in a closure.
    function setZeroTimeout(fn) {
        pending.push(fn);
        window.postMessage(messageName, "*");
    }

    function handleMessage(event) {
        // Ignore messages from other windows or with other payloads.
        if (event.source !== window || event.data !== messageName) {
            return;
        }
        event.stopPropagation();
        const fn = pending.shift();
        if (fn) {
            fn();
        }
    }

    window.addEventListener("message", handleMessage, true);

    // Add the one thing we want added to the window object.
    window.setZeroTimeout = setZeroTimeout;
}());


(function () {

const DEFAULT_URL = "ws://127.0.0.1:40102";
Expand Down Expand Up @@ -922,11 +952,23 @@ import { Vircadia, DomainServer, Camera, AudioMixer, AvatarMixer, EntityServer,
gameLoopTimer = setTimeout(gameLoop, timeout);
};

// Controls whether audioLoop keeps re-scheduling itself.
let runAudioLoop = false;
// Pumps the audio mixer as fast as the event loop allows, using
// window.setZeroTimeout to avoid setTimeout's clamped minimum delay.
const audioLoop = () => {
audioMixer.update();
if (runAudioLoop) {
window.setZeroTimeout(audioLoop);
}
};

const connectButton = document.getElementById("domainConnectButton");
connectButton.addEventListener("click", () => {
// Start the main game loop if it isn't already running.
if (gameLoopTimer === null) {
gameLoopTimer = setTimeout(gameLoop, 0);
}
// Start the high-frequency audio loop; the flag prevents scheduling
// a second concurrent chain of setZeroTimeout callbacks.
if (!runAudioLoop) {
runAudioLoop = true;
window.setZeroTimeout(audioLoop);
}
});

const disconnectButton = document.getElementById("domainDisconnectButton");
Expand All @@ -936,6 +978,7 @@ import { Vircadia, DomainServer, Camera, AudioMixer, AvatarMixer, EntityServer,
gameLoopTimer = null;
gameRateValue.value = "";
}
runAudioLoop = false;
});

}());
Expand Down
11 changes: 11 additions & 0 deletions src/AudioMixer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,17 @@ class AudioMixer extends AssignmentClient {
return this.#_audioOutput.pause();
}

/*@sdkdoc
* Event loop update method for audio processing. This should be called as
* often as possible to keep up with the audio worklets.
* @function AudioMixer.update
*/
update(): void {
// C++ void Application::update(float deltaTime)

// Delegates to AudioClient.update(), which drains pending audio input
// from the worklet's ring buffer into network frames.
this.#_audioClient.update();

}

/*@sdkdoc
* Triggered when the audio mixer has made the client mute its audio input — either because the background noise is
Expand Down
11 changes: 9 additions & 2 deletions src/domain/audio-client/AudioClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,13 @@ class AudioClient {
return this.#_mutedByMixer.signal();
}

/*@devdoc
* Event loop method for audio processing. Drains pending microphone
* samples from the audio input's ring buffer into network-sized frames.
* @function AudioClient.update
*/
update(): void {
this.#_audioInput.processRingBuffer();
}

#start(): void {
// C++ void AudioClient::start()
Expand Down Expand Up @@ -318,7 +325,7 @@ class AudioClient {
this.#_isStereoInput = false;

if (this.#_audioInput.isStarted()) {
this.#_audioInput.readyRead.disconnect(this.#handleMicAudioInput);
this.#_audioInput.setFrameCallback(undefined);
await this.#_audioInput.stop();
}

Expand Down Expand Up @@ -350,7 +357,7 @@ class AudioClient {
});

if (isStarted) {
this.#_audioInput.readyRead.connect(this.#handleMicAudioInput);
this.#_audioInput.setFrameCallback(this.#handleMicAudioInput);
supportedFormat = true;
} else {
console.error("[audioclient] Error starting audio input -", this.#_audioInput.errorString());
Expand Down
86 changes: 68 additions & 18 deletions src/domain/audio/AudioInput.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import AudioWorklets from "./AudioWorklets";
import AudioConstants from "../audio/AudioConstants";
import assert from "../shared/assert";
import SignalEmitter, { Signal } from "../shared/SignalEmitter";
import { RingBuffer } from "../audio/RingBuffer";


/*@devdoc
Expand Down Expand Up @@ -46,8 +47,14 @@ class AudioInput {
#_isSuspended = false;
#_errorString = "";
// Completed network frames awaiting readAll().
#_frameBuffer: Array<Int16Array> = [];
// Frame currently being filled from the ring buffer.
#_currentFrame: Int16Array = new Int16Array();
// Number of samples written into #_currentFrame so far.
#_currentFrameSize = 0;
// NOTE(review): `{} as ...` placeholders bypass type checking; the real
// values are assigned in #setUpAudioContext() before any use — confirm
// no code path touches these before set-up completes.
#_ringBufferStorage = {} as SharedArrayBuffer;
#_ringBuffer = {} as RingBuffer<Int16Array>;
// Hard-coded to mono for now (see #setUpAudioContext()).
#_channelCount = 1;

#_readyRead = new SignalEmitter();
// Invoked once per completed network frame, if set.
#_frameCallback: (() => void) | undefined = undefined;

#_audioWorkletRelativePath = "";

Expand Down Expand Up @@ -205,7 +212,7 @@ class AudioInput {
// C++ QIODevice::readAll()
let frame: Int16Array | undefined = undefined;
if (this.#_frameBuffer.length > 0) {
frame = this.#_frameBuffer.pop();
frame = this.#_frameBuffer.shift();
}
if (frame === undefined) {
this.#_errorString = "Unexpected read of empty audio input buffer!";
Expand Down Expand Up @@ -236,25 +243,58 @@ class AudioInput {
return this.#_readyRead.signal();
}

/*@devdoc
* Sets or clears the callback that is called each time a new network frame
* of audio input is available for reading.
* @function AudioInput.setFrameCallback
* @param {function|undefined} callback - The callback to invoke per completed
*     frame, or <code>undefined</code> to clear the current callback.
*/
setFrameCallback(callback?: () => void): void {
// The parameter is either a function or undefined, so a direct
// assignment covers both the set and clear cases.
this.#_frameCallback = callback;
}

/*@devdoc
* Receives the next network frame of data from the audio input from the {@link AudioInputProcessor} used, triggering a
* {@link AudioInput.readyRead} signal.
* @function AudioInput.processAudioInputString
* @param {MessageEvent<ArrayBuffer>} The PCM audio data.
* @returns {Slot}
* Reads pending audio data from the {@link AudioInputProcessor},
* accumulates it into frames and triggers {@link
* AudioInput.readyRead} signal when ready to send.
* @function AudioInput.processRingBuffer
*/
processAudioInputMessage = (message: MessageEvent<ArrayBuffer>): void => {
processRingBuffer(): void {
// C++ N/A

const frame = new Int16Array(message.data);
this.#_frameBuffer.push(frame);
if (this.#_isStarted) {
let available = this.#_ringBuffer.availableRead();
while (available !== 0) {
const requiredForFrame = this.#_currentFrame.length - this.#_currentFrameSize;
const read = this.#_ringBuffer.pop(
this.#_currentFrame,
Math.min(available, requiredForFrame),
this.#_currentFrameSize
);
available -= read;
this.#_currentFrameSize += read;
if (this.#_currentFrameSize === this.#_currentFrame.length) {
this.#_frameBuffer.push(this.#_currentFrame);
this.#_currentFrame = this.#createFrame();
this.#_currentFrameSize = 0;
// WEBRTC TODO: Could perhaps throttle the #_readyRead.emit()s on the understanding that
// multiple packets will be processed by the method connected to the signal.
this.#_readyRead.emit();
this.#_frameCallback?.();
}
}
}

// WEBRTC TODO: Could perhaps throttle the #_readyRead.emit()s on the understanding that multiple packets will be
// processed by the method connected to the signal.
this.#_readyRead.emit();
};
}

// Allocates an empty network frame sized for the current channel count.
#createFrame(): Int16Array {
if (this.#_channelCount === 1) {
return new Int16Array(AudioConstants.NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
return new Int16Array(AudioConstants.NETWORK_FRAME_SAMPLES_STEREO);
}

// Sets up the AudioContext etc.
async #setUpAudioContext(): Promise<boolean> {
Expand Down Expand Up @@ -285,10 +325,18 @@ class AudioInput {

// TODO: The SDK should just use the number of channels that the input device has, up to a maximum of 2 (stereo).
// due to lack reliable ways to retrieve channel count across browsers, we are hard coding mono input for now.
const channelCount = 1;
assert(channelCount > 0);
this.#_channelCount = 1;
assert(this.#_channelCount > 0);
// The channel count has already been checked in AudioClient.#switchInputToAudioDevice().

this.#_currentFrame = this.#createFrame();

const RING_BUFFER_LENGTH_IN_SECONDS = 0.1;
const ringBufferCapacity = this.#_channelCount * (AudioConstants.SAMPLE_RATE * RING_BUFFER_LENGTH_IN_SECONDS);
this.#_ringBufferStorage = RingBuffer.getStorageForCapacity(
ringBufferCapacity, Int16Array);
this.#_ringBuffer = new RingBuffer(this.#_ringBufferStorage, Int16Array);

// Audio worklet.
if (!this.#_audioContext.audioWorklet) {
this.#_errorString = "Cannot set up audio input stream. App may not be being served via HTTPS or from localhost.";
Expand All @@ -299,11 +347,13 @@ class AudioInput {
this.#_audioInputProcessor = new AudioWorkletNode(this.#_audioContext, "vircadia-audio-input-processor", {
numberOfInputs: 1,
numberOfOutputs: 0,
channelCount,
channelCountMode: "explicit"
channelCount: this.#_channelCount,
channelCountMode: "explicit",
processorOptions: {
ringBufferStorage: this.#_ringBufferStorage
}
});
this.#_audioInputProcessorPort = this.#_audioInputProcessor.port;
this.#_audioInputProcessorPort.onmessage = this.processAudioInputMessage;

// Wire up the nodes.
this.#_audioStreamSource.connect(this.#_audioInputProcessor);
Expand Down
Loading