
Rename do_maybe_async() to schedule_gui()

Robbert van der Helm 2022-10-23 15:11:05 +02:00
parent 4524719128
commit 31cda78201
12 changed files with 26 additions and 26 deletions

View file

@@ -9,7 +9,7 @@ use crate::plugin::Plugin;
// # Safety
//
// The implementing wrapper needs to be able to handle concurrent requests, and it should perform
-// the actual callback within [MainThreadQueue::do_maybe_async].
+// the actual callback within [MainThreadQueue::schedule_gui].
pub trait InitContext<P: Plugin> {
/// Get the current plugin API.
fn plugin_api(&self) -> PluginApi;

View file

@@ -12,7 +12,7 @@ use crate::plugin::Plugin;
// # Safety
//
// The implementing wrapper needs to be able to handle concurrent requests, and it should perform
-// the actual callback within [MainThreadQueue::do_maybe_async].
+// the actual callback within [MainThreadQueue::schedule_gui].
pub trait ProcessContext<P: Plugin> {
/// Get the current plugin API.
fn plugin_api(&self) -> PluginApi;

View file

@@ -46,7 +46,7 @@ where
///
/// If the task queue is full, then this will return false.
#[must_use]
-fn do_maybe_async(&self, task: T) -> bool;
+fn schedule_gui(&self, task: T) -> bool;
/// Whether the calling thread is the event loop's main thread. This is usually the thread the
/// event loop instance was initialized on.
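For context, the renamed method keeps the calling convention spelled out in this doc comment: callers must check the returned bool, and the wrappers only turn a full queue into a debug assertion. A minimal sketch of that pattern follows; the trait shape mirrors this hunk, while the `Task` enum and `notify_latency_changed` helper are made up for illustration and the standard `debug_assert!` stands in for `nih_debug_assert!`.

// Sketch only: `Task` and `notify_latency_changed` are hypothetical; the
// trait signature mirrors the hunk above.
trait EventLoop<T> {
    /// Returns `false` when the task queue is full and the task was dropped.
    #[must_use]
    fn schedule_gui(&self, task: T) -> bool;

    /// Whether the calling thread is the event loop's main thread.
    fn is_main_thread(&self) -> bool;
}

enum Task {
    LatencyChanged,
}

fn notify_latency_changed(event_loop: &dyn EventLoop<Task>) {
    // Same pattern the wrappers use in the hunks below: post the task and
    // only complain in debug builds when the queue was full.
    let task_posted = event_loop.schedule_gui(Task::LatencyChanged);
    debug_assert!(task_posted, "The task queue is full, dropping task...");
}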

View file

@@ -25,7 +25,7 @@ pub(crate) struct LinuxEventLoop<T, E> {
/// queue.
main_thread_id: ThreadId,
-/// A thread that act as our worker thread. When [`do_maybe_async()`][Self::do_maybe_async()] is
+/// A thread that act as our worker thread. When [`schedule_gui()`][Self::schedule_gui()] is
/// called, this thread will be woken up to execute the task on the executor. This is wrapped in
/// an `Option` so the thread can be taken out of it and joined when this struct gets dropped.
worker_thread: Option<JoinHandle<()>>,
@@ -64,7 +64,7 @@ where
}
}
-fn do_maybe_async(&self, task: T) -> bool {
+fn schedule_gui(&self, task: T) -> bool {
if self.is_main_thread() {
self.executor.execute(task, true);
true
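The doc comment above describes the delegation model: run the task in place when already on the main thread, otherwise wake the worker thread. A rough sketch of that branch, not the actual nih-plug implementation; the bounded-channel handoff and the boxed-closure task type are assumptions made purely for illustration.

use std::sync::mpsc::SyncSender;
use std::thread::{self, ThreadId};

/// Stand-in task type; the real event loop is generic over the task type.
type Task = Box<dyn FnOnce() + Send>;

struct SketchLinuxEventLoop {
    main_thread_id: ThreadId,
    /// Bounded sender feeding the worker thread; a full queue is what makes
    /// `schedule_gui()` return `false`.
    task_sender: SyncSender<Task>,
}

impl SketchLinuxEventLoop {
    fn is_main_thread(&self) -> bool {
        thread::current().id() == self.main_thread_id
    }

    #[must_use]
    fn schedule_gui(&self, task: Task) -> bool {
        if self.is_main_thread() {
            // Mirrors the `self.executor.execute(task, true)` call above:
            // already on the main thread, so run the task right away.
            task();
            true
        } else {
            // Otherwise hand the task to the worker thread. `try_send` never
            // blocks, which keeps this safe to call from a realtime thread.
            self.task_sender.try_send(task).is_ok()
        }
    }
}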

View file

@@ -132,7 +132,7 @@ where
}
}
-fn do_maybe_async(&self, task: T) -> bool {
+fn schedule_gui(&self, task: T) -> bool {
if self.is_main_thread() {
self.executor.execute(task, true);
true

View file

@@ -60,7 +60,7 @@ impl<P: ClapPlugin> ProcessContext<P> for WrapperProcessContext<'_, P> {
}
fn execute_gui(&self, task: P::BackgroundTask) {
-let task_posted = self.wrapper.do_maybe_async(Task::PluginTask(task));
+let task_posted = self.wrapper.schedule_gui(Task::PluginTask(task));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}

View file

@@ -320,7 +320,7 @@ impl<P: ClapPlugin> EventLoop<Task<P>, Wrapper<P>> for Wrapper<P> {
panic!("What are you doing");
}
-fn do_maybe_async(&self, task: Task<P>) -> bool {
+fn schedule_gui(&self, task: Task<P>) -> bool {
if self.is_main_thread() {
self.execute(task, true);
true
@@ -692,7 +692,7 @@ impl<P: ClapPlugin> Wrapper<P> {
let wrapper = wrapper.clone();
move |task| {
-let task_posted = wrapper.do_maybe_async(Task::PluginTask(task));
+let task_posted = wrapper.schedule_gui(Task::PluginTask(task));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}),
@@ -1649,7 +1649,7 @@ impl<P: ClapPlugin> Wrapper<P> {
}
// After the state has been updated, notify the host about the new parameter values
-let task_posted = self.do_maybe_async(Task::RescanParamValues);
+let task_posted = self.schedule_gui(Task::RescanParamValues);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
@@ -1659,7 +1659,7 @@ impl<P: ClapPlugin> Wrapper<P> {
// to keep doing it this way to stay consistent with VST3.
let old_latency = self.current_latency.swap(samples, Ordering::SeqCst);
if old_latency != samples {
-let task_posted = self.do_maybe_async(Task::LatencyChanged);
+let task_posted = self.schedule_gui(Task::LatencyChanged);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}
@@ -1677,7 +1677,7 @@ impl<P: ClapPlugin> Wrapper<P> {
if clamped_capacity != self.current_voice_capacity.load(Ordering::Relaxed) {
self.current_voice_capacity
.store(clamped_capacity, Ordering::Relaxed);
-let task_posted = self.do_maybe_async(Task::VoiceInfoChanged);
+let task_posted = self.schedule_gui(Task::VoiceInfoChanged);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}
@@ -2353,7 +2353,7 @@ impl<P: ClapPlugin> Wrapper<P> {
check_null_ptr!((), plugin);
let wrapper = &*(plugin as *const Self);
-// [Self::do_maybe_async] posts a task to the queue and asks the host to call this function
+// [Self::schedule_gui] posts a task to the queue and asks the host to call this function
// on the main thread, so once that's done we can just handle all requests here
while let Some(task) = wrapper.tasks.pop() {
wrapper.execute(task, true);
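The comment in the last hunk sums up the CLAP wrapper's strategy: `schedule_gui()` enqueues a task and asks the host for a main-thread callback, and the callback then drains the queue. A dependency-free sketch of that post-then-drain pattern follows; the real wrapper uses a realtime-safe lock-free queue plus the host's callback request, not a `Mutex<VecDeque>`, and the type names here are stand-ins.

use std::collections::VecDeque;
use std::sync::Mutex;

/// Stand-ins for the wrapper's task variants seen in the hunks above.
enum Task {
    RescanParamValues,
    LatencyChanged,
    VoiceInfoChanged,
}

struct SketchWrapper {
    /// The real wrapper uses a bounded lock-free queue here; a mutexed
    /// VecDeque keeps this sketch dependency-free (and is *not* realtime-safe).
    tasks: Mutex<VecDeque<Task>>,
}

impl SketchWrapper {
    /// Counterpart of `schedule_gui()`: enqueue the task, then ask the host to
    /// call us back on the main thread (that request is elided here).
    #[must_use]
    fn schedule_gui(&self, task: Task) -> bool {
        self.tasks.lock().unwrap().push_back(task);
        // In the real wrapper this is where the host is asked to invoke the
        // main-thread callback.
        true
    }

    /// Counterpart of the callback above: now that we are on the main thread,
    /// drain and execute every queued task.
    fn on_main_thread(&self) {
        while let Some(task) = self.tasks.lock().unwrap().pop_front() {
            self.execute(task);
        }
    }

    fn execute(&self, task: Task) {
        match task {
            Task::RescanParamValues => { /* tell the host to rescan parameter values */ }
            Task::LatencyChanged => { /* tell the host the latency changed */ }
            Task::VoiceInfoChanged => { /* tell the host the voice info changed */ }
        }
    }
}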

View file

@@ -68,7 +68,7 @@ impl<P: Plugin, B: Backend> ProcessContext<P> for WrapperProcessContext<'_, P, B
}
fn execute_gui(&self, task: P::BackgroundTask) {
-let task_posted = self.wrapper.event_loop.do_maybe_async(task);
+let task_posted = self.wrapper.event_loop.schedule_gui(task);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}

View file

@@ -246,7 +246,7 @@ impl<P: Plugin, B: Backend> Wrapper<P, B> {
let wrapper = wrapper.clone();
move |task| {
-let task_posted = wrapper.event_loop.do_maybe_async(task);
+let task_posted = wrapper.event_loop.schedule_gui(task);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}),

View file

@@ -62,7 +62,7 @@ impl<P: Vst3Plugin> ProcessContext<P> for WrapperProcessContext<'_, P> {
}
fn execute_gui(&self, task: P::BackgroundTask) {
-let task_posted = self.inner.do_maybe_async(Task::PluginTask(task));
+let task_posted = self.inner.schedule_gui(Task::PluginTask(task));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
@@ -94,7 +94,7 @@ impl<P: Vst3Plugin> GuiContext for WrapperGuiContext<P> {
}
fn request_resize(&self) -> bool {
-let task_posted = self.inner.do_maybe_async(Task::RequestResize);
+let task_posted = self.inner.schedule_gui(Task::RequestResize);
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
// TODO: We don't handle resize request failures right now. In practice this should however

View file

@@ -55,7 +55,7 @@ pub(crate) struct WrapperInner<P: Vst3Plugin> {
/// A realtime-safe task queue so the plugin can schedule tasks that need to be run later on the
/// GUI thread. This field should not be used directly for posting tasks. This should be done
-/// through [`Self::do_maybe_async()`] instead. That method posts the task to the host's
+/// through [`Self::schedule_gui()`] instead. That method posts the task to the host's
/// `IRunLoop` instead of it's available.
///
/// This AtomicRefCell+Option is only needed because it has to be initialized late. There is no
@@ -340,7 +340,7 @@ impl<P: Vst3Plugin> WrapperInner<P> {
let wrapper = wrapper.clone();
move |task| {
-let task_posted = wrapper.do_maybe_async(Task::PluginTask(task));
+let task_posted = wrapper.schedule_gui(Task::PluginTask(task));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}),
@@ -367,13 +367,13 @@ impl<P: Vst3Plugin> WrapperInner<P> {
}
}
-/// Either posts the function to the task queue using [`EventLoop::do_maybe_async()`] so it can
+/// Either posts the function to the task queue using [`EventLoop::schedule_gui()`] so it can
/// be delegated to the main thread, executes the task directly if this is the main thread, or
/// runs the task on the host's `IRunLoop` if the GUI is open and it exposes one. This function
///
/// If the task queue is full, then this will return false.
#[must_use]
-pub fn do_maybe_async(&self, task: Task<P>) -> bool {
+pub fn schedule_gui(&self, task: Task<P>) -> bool {
let event_loop = self.event_loop.borrow();
let event_loop = event_loop.as_ref().unwrap();
if event_loop.is_main_thread() {
@@ -388,9 +388,9 @@ impl<P: Vst3Plugin> WrapperInner<P> {
match &*self.plug_view.read() {
Some(plug_view) => match plug_view.do_maybe_in_run_loop(task) {
Ok(()) => true,
-Err(task) => event_loop.do_maybe_async(task),
+Err(task) => event_loop.schedule_gui(task),
},
-None => event_loop.do_maybe_async(task),
+None => event_loop.schedule_gui(task),
}
}
}
@@ -517,7 +517,7 @@ impl<P: Vst3Plugin> WrapperInner<P> {
.borrow()
.as_ref()
.unwrap()
-.do_maybe_async(Task::TriggerRestart(
+.schedule_gui(Task::TriggerRestart(
RestartFlags::kParamValuesChanged as i32,
));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
@@ -528,7 +528,7 @@ impl<P: Vst3Plugin> WrapperInner<P> {
let old_latency = self.current_latency.swap(samples, Ordering::SeqCst);
if old_latency != samples {
let task_posted =
-self.do_maybe_async(Task::TriggerRestart(RestartFlags::kLatencyChanged as i32));
+self.schedule_gui(Task::TriggerRestart(RestartFlags::kLatencyChanged as i32));
nih_debug_assert!(task_posted, "The task queue is full, dropping task...");
}
}
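Taken together, the hunks above show that the VST3 `schedule_gui()` is a three-way dispatch: execute immediately on the main thread, otherwise prefer the host's `IRunLoop` when the editor exposes one, and fall back to the wrapper's own event loop queue. A compressed sketch of just that decision tree follows; the traits and the free function are stand-ins for illustration, not the actual wrapper types.

/// Stand-in for the wrapper's event loop abstraction.
trait EventLoop<T> {
    fn is_main_thread(&self) -> bool;
    #[must_use]
    fn schedule_gui(&self, task: T) -> bool;
}

/// Stand-in for the plug view's `IRunLoop` bridge; hands the task back when
/// posting to the run loop fails.
trait RunLoopView<T> {
    fn do_maybe_in_run_loop(&self, task: T) -> Result<(), T>;
}

fn schedule_gui<T>(
    event_loop: &dyn EventLoop<T>,
    plug_view: Option<&dyn RunLoopView<T>>,
    execute_now: impl FnOnce(T),
    task: T,
) -> bool {
    if event_loop.is_main_thread() {
        // Case 1: already on the main thread, run the task directly.
        execute_now(task);
        true
    } else {
        match plug_view {
            // Case 2: the editor is open and the host exposes an `IRunLoop`.
            Some(view) => match view.do_maybe_in_run_loop(task) {
                Ok(()) => true,
                // Posting to the run loop failed, fall back to the queue.
                Err(task) => event_loop.schedule_gui(task),
            },
            // Case 3: no run loop available, use the wrapper's own queue.
            None => event_loop.schedule_gui(task),
        }
    }
}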

View file

@@ -515,7 +515,7 @@ impl<P: Vst3Plugin> Drop for RunLoopEventHandler<P> {
.borrow()
.as_ref()
.unwrap()
-.do_maybe_async(task);
+.schedule_gui(task);
}
if posting_failed {