Fix #133 (#136)

Merged · 5 commits · Sep 9, 2024
File renamed without changes.
16 changes: 9 additions & 7 deletions .gitignore
@@ -2,6 +2,15 @@ NOTES.md
/generator/src
.DS_Store

# Working files
TODOS.md
NOTES.md
run-wpt.*
all-check.sh
tests/junk.mjs
yarn.lock
issues

# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node

@@ -131,10 +140,3 @@ Cargo.lock

*.env

# Working files
TODOS.md
NOTES.md
run-wpt.*
all-check.sh
tests/junk.mjs
yarn.lock
4 changes: 2 additions & 2 deletions Cargo.toml
@@ -11,8 +11,8 @@ crate-type = ["cdylib"]

[dependencies]
crossbeam-channel = "0.5.12"
napi = { version="2.15", features=["napi9", "tokio_rt"] }
napi-derive = { version="2.15" }
napi = { version="2.16", features=["napi9", "tokio_rt"] }
napi-derive = { version="2.16" }
thread-priority = "1.1.0"
web-audio-api = "=0.45"
# web-audio-api = { path = "../web-audio-api-rs" }
2 changes: 2 additions & 0 deletions generator/rs/lib.tmpl.rs
@@ -1,4 +1,6 @@
#![deny(clippy::all)]
// @todo - properly fix this clippy issue
#![allow(clippy::zero_repeat_side_effects)]
Collaborator

meh, I think this is a napi issue causing a false positive in clippy

Collaborator Author
@b-ma b-ma Sep 9, 2024

Yup I agree, let's see if the problem disappears in a future napi-rs release; I opened an issue so we don't forget, cf. #137


use napi::{Env, JsObject, Result};
use napi_derive::module_exports;
2 changes: 1 addition & 1 deletion js/AudioContext.js
@@ -104,7 +104,7 @@ module.exports = function(jsExport, nativeBinding) {
// object after its instantiation, and that we don't have any initial `resume` call.
this[kNapiObj].listen_to_events();

// @todo - check if this is still required
// @todo - This is probably not required anymore as the event listeners
// prevent garbage collection and process exit
const id = contextId++;
// store in process to prevent garbage collection
2 changes: 1 addition & 1 deletion js/AudioWorklet.js
@@ -64,7 +64,7 @@ const resolveModule = async (moduleUrl) => {
// get caller site from error stack trace
const callerSite = caller(2);

if (callerSite.startsWith('http')) {
if (callerSite.startsWith('http')) { // this branch exists for wpt, where the caller site is a URL
let url;
// handle origin relative and caller path relative URLs
if (moduleUrl.startsWith('/')) {
22 changes: 9 additions & 13 deletions js/OfflineAudioContext.js
@@ -82,24 +82,26 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {

// Add function to Napi object to bridge from Rust events to JS EventTarget
// They will be effectively registered on rust side when `startRendering` is called
this[kNapiObj][kOnStateChange] = (function(err, rawEvent) {
this[kNapiObj][kOnStateChange] = (function(_err, rawEvent) {
const event = new Event(rawEvent.type);
propagateEvent(this, event);
}).bind(this);

// This event is, per spec, the last triggered one
this[kNapiObj][kOnComplete] = (function(err, rawEvent) {
// workaround the fact that this event seems to be triggered before
// workaround the fact that the oncomplete event is triggered before
// startRendering fulfills and that we want to return the exact same instance
if (this.#renderedBuffer === null) {
this.#renderedBuffer = new jsExport.AudioBuffer({ [kNapiObj]: rawEvent.renderedBuffer });
}
this.#renderedBuffer = new jsExport.AudioBuffer({ [kNapiObj]: rawEvent.renderedBuffer });

const event = new jsExport.OfflineAudioCompletionEvent(rawEvent.type, {
renderedBuffer: this.#renderedBuffer,
});

propagateEvent(this, event);
// delay event propagation to next tick so that it is executed after startRendering fulfills
setTimeout(() => {
Collaborator

I think setTimeout(.., 0) can be expressed more efficiently with setImmediate

Collaborator Author

Yup thanks, I was indeed misled by MDN, which marks it deprecated, but that doesn't hold in a Node context
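
For reference, a minimal sketch of the scheduling involved (not part of this diff; plain Node.js, illustrative logs only): promise continuations are microtasks, so an awaited startRendering() fulfills before either deferral runs, while setImmediate and setTimeout(.., 0) are both macrotasks; setImmediate simply runs in the check phase and skips the timer clamp.

// microtask: this is where an awaited startRendering() promise fulfills
Promise.resolve().then(() => console.log('1: microtask'));
// macrotasks: both always run after all pending microtasks; their relative
// order is not guaranteed when scheduled from the main module
setImmediate(() => console.log('setImmediate (check phase, no timer clamp)'));
setTimeout(() => console.log('setTimeout(.., 0) (timers phase, ~1 ms clamp)'), 0);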

propagateEvent(this, event);
this.#renderedBuffer = null;
}, 0);
}).bind(this);
}

@@ -145,15 +147,9 @@ module.exports = function patchOfflineAudioContext(jsExport, nativeBinding) {
throwSanitizedError(err);
}

// release audio worklet, if any
// release audio worklets
await this.audioWorklet[kWorkletRelease]();

// workaround the fact that this event seems to be triggered before
// startRendering fulfills and that we want to return the exact same instance
if (this.#renderedBuffer === null) {
this.#renderedBuffer = new jsExport.AudioBuffer({ [kNapiObj]: nativeAudioBuffer });
}

return this.#renderedBuffer;
}

3 changes: 2 additions & 1 deletion package.json
@@ -44,7 +44,8 @@
"lint": "npx eslint index.cjs index.mjs && npx eslint js/*.js && npx eslint examples/*.mjs",
"preversion": "yarn install && npm run generate",
"postversion": "cargo bump $npm_package_version && git commit -am \"v$npm_package_version\" && node .scripts/check-changelog.mjs",
"test": "mocha tests",
"test": "mocha tests/*.spec.js",
"test:only": "mocha",
"wpt": "npm run build && node ./.scripts/wpt-harness.mjs",
"wpt:only": "node ./.scripts/wpt-harness.mjs"
},
7 changes: 7 additions & 0 deletions src/audio_buffer.rs
@@ -5,6 +5,13 @@ use web_audio_api::{AudioBuffer, AudioBufferOptions};

pub(crate) struct NapiAudioBuffer(Option<AudioBuffer>);

// for debug purpose
// impl Drop for NapiAudioBuffer {
// fn drop(&mut self) {
// println!("NAPI: NapiAudioBuffer dropped");
// }
// }

impl NapiAudioBuffer {
pub fn create_js_class(env: &Env) -> Result<JsFunction> {
env.define_class(
10 changes: 9 additions & 1 deletion src/audio_context.rs
@@ -252,10 +252,18 @@ fn listen_to_events(ctx: CallContext) -> Result<JsUndefined> {

let k_onstatechange = crate::utils::get_symbol_for(ctx.env, "node-web-audio-api:onstatechange");
let statechange_cb = js_this.get_property(k_onstatechange).unwrap();
let context_clone = Arc::clone(&napi_context.0);

let mut statechange_tsfn = ctx.env.create_threadsafe_function(
&statechange_cb,
0,
|ctx: ThreadSafeCallContext<Event>| {
move |ctx: ThreadSafeCallContext<Event>| {
if context_clone.state() == AudioContextState::Closed {
// clear all context listeners, so that it can be garbage collected
context_clone.clear_onsinkchange();
context_clone.clear_onstatechange();
}

let mut event = ctx.env.create_object()?;
let event_type = ctx.env.create_string(ctx.value.type_)?;
event.set_named_property("type", event_type)?;
2 changes: 1 addition & 1 deletion src/audio_worklet_node.rs
@@ -426,7 +426,7 @@ fn process_audio_worklet(env: &Env, processors: &JsObject, args: ProcessorArgume
let queue_task = processor.get_property::<JsSymbol, JsFunction>(k_worklet_queue_task)?;
let js_cmd = env.create_string(&cmd)?;
let js_err = env.create_error(err)?;
queue_task.apply2(processor, js_cmd, js_err)?;
let _: Result<JsUnknown> = queue_task.apply2(processor, js_cmd, js_err);
}

Ok(())
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -18,6 +18,8 @@
// -------------------------------------------------------------------------- //

#![deny(clippy::all)]
// @todo - properly fix this clippy issue (really don't understand the problem...)
#![allow(clippy::zero_repeat_side_effects)]

use napi::{Env, JsObject, Result};
use napi_derive::module_exports;
28 changes: 18 additions & 10 deletions src/offline_audio_context.rs
@@ -13,7 +13,7 @@ use crate::*;
#[derive(Clone)]
pub(crate) struct NapiOfflineAudioContext(Arc<OfflineAudioContext>, usize);

// for debug purpose
// // for debug purpose
// impl Drop for NapiOfflineAudioContext {
// fn drop(&mut self) {
// println!("NAPI: NapiOfflineAudioContext dropped");
@@ -115,21 +115,34 @@ fn start_rendering(ctx: CallContext) -> Result<JsObject> {
},
)?;

// unref tsfn so they do not prevent the process from exiting
let _ = statechange_tsfn.unref(ctx.env);

context.set_onstatechange(move |e| {
statechange_tsfn.call(Ok(e), ThreadsafeFunctionCallMode::Blocking);
});

let k_oncomplete = crate::utils::get_symbol_for(ctx.env, "node-web-audio-api:oncomplete");
let complete_cb = js_this.get_property(k_oncomplete).unwrap();
let context_clone = Arc::clone(&napi_context.0);

let mut complete_tsfn = ctx.env.create_threadsafe_function(
&complete_cb,
0,
|ctx: ThreadSafeCallContext<OfflineAudioCompletionEvent>| {
move |ctx: ThreadSafeCallContext<OfflineAudioCompletionEvent>| {
// This is the last event that can be triggered by this context, so we can
// clear the listeners so that the context can be properly garbage collected
context_clone.clear_onstatechange();
context_clone.clear_oncomplete();

let raw_event = ctx.value;
let mut event = ctx.env.create_object()?;

let event_type = ctx.env.create_string("complete")?;
event.set_named_property("type", event_type)?;

// @fixme: this event is propagated before `startRedering` fulfills
// which is probaly wrong, so let's propagate the JS audio buffer
// and let JS handle the race condition
// This event is propagated before `startRendering` fulfills
// which is wrong, order is fixed on JS side.
let ctor = crate::utils::get_class_ctor(&ctx.env, "AudioBuffer")?;
let js_audio_buffer = ctor.new_instance(&[ctx.env.get_null()?])?;
let napi_audio_buffer = ctx.env.unwrap::<NapiAudioBuffer>(&js_audio_buffer)?;
@@ -142,13 +155,8 @@
)?;

// unref tsfn so they do not prevent the process to exit
let _ = statechange_tsfn.unref(ctx.env);
let _ = complete_tsfn.unref(ctx.env);

context.set_onstatechange(move |e| {
statechange_tsfn.call(Ok(e), ThreadsafeFunctionCallMode::Blocking);
});

context.set_oncomplete(move |e| {
complete_tsfn.call(Ok(e), ThreadsafeFunctionCallMode::Blocking);
});
13 changes: 11 additions & 2 deletions tests/OfflineAudioContext.spec.mjs
@@ -1,18 +1,24 @@
import { assert } from 'chai';
import {
AudioContext,
AudioBuffer,
OfflineAudioContext,
} from '../index.mjs';

describe('# OfflineAudioContext', () => {
describe('## await startRendering()', () => {
it('buffer returned by startRedring and buffer from complete should be same instance', async () => {
it('buffer returned by startRendering and buffer from `oncomplete` event should be same instance', async () => {
const offline = new OfflineAudioContext(1, 48000, 48000);

let aResult = null;
let bResult = null;
let renderingEnded = false;

offline.addEventListener('complete', (e) => aResult = e.renderedBuffer);
offline.addEventListener('complete', (e) => {
// check that the complete event is triggered after startRendering fulfills
assert.isTrue(renderingEnded);
aResult = e.renderedBuffer;
});

const osc = offline.createOscillator();
osc.connect(offline.destination);
@@ -21,9 +27,12 @@ describe('# OfflineAudioContext', () => {
osc.stop(1.);

bResult = await offline.startRendering();
renderingEnded = true;
// make sure we received the event
await new Promise(resolve => setTimeout(resolve, 100));

assert.isTrue(aResult instanceof AudioBuffer);
assert.isTrue(bResult instanceof AudioBuffer);
assert.deepEqual(aResult, bResult);
});
});
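
For completeness, a consumer-level sketch of the behaviour this spec pins down (same imports as the test file; not part of the diff): the buffer returned by the awaited startRendering() and the one carried by the `complete` event are the same instance, and the event only fires after the promise has fulfilled.

import { OfflineAudioContext } from '../index.mjs';

const offline = new OfflineAudioContext(1, 48000, 48000);
const osc = offline.createOscillator();
osc.connect(offline.destination);
osc.start(0);
osc.stop(1);

offline.addEventListener('complete', (e) => {
  // runs on a later tick, after the awaited call below has fulfilled
  console.log('same instance:', e.renderedBuffer === buffer); // true
});

const buffer = await offline.startRendering();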