Commit

Some fixes
brummer10 committed Apr 24, 2024
1 parent 46cc919 commit c6af2f9
Showing 4 changed files with 94 additions and 15 deletions.
Ratatouille/NeuralAmpMulti.cc (8 changes: 4 additions & 4 deletions)
@@ -47,10 +47,10 @@ class NeuralAmpMulti {
std::string load_afile;
std::string load_bfile;

void clear_state_f();
void init(unsigned int sample_rate);
inline void clear_state_f();
inline void init(unsigned int sample_rate);
void connect(uint32_t port,void* data);
void compute(int count, float *input0, float *output0);
inline void compute(int count, float *input0, float *output0);
bool load_nam_afile();
bool load_nam_bfile();
void unload_nam_afile();
@@ -111,7 +111,7 @@ void NeuralAmpMulti::connect(uint32_t port,void* data)
}
}

void NeuralAmpMulti::compute(int count, float *input0, float *output0)
inline void NeuralAmpMulti::compute(int count, float *input0, float *output0)
{
if (!modela && !modelb) return;
if (output0 != input0)
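
The NeuralAmpMulti.cc (and matching RtNeuralMulti.cc) hunks only add the inline specifier to clear_state_f(), init() and compute(). A plausible reason, given that Ratatouille.cpp already #includes implementation files such as resampler.cc and gx_convolver.cc directly: out-of-class member definitions that end up in more than one translation unit must be declared inline to avoid multiple-definition (ODR) errors at link time. A minimal sketch with illustrative names, not taken from the commit:

// Hypothetical example: a DSP class whose definitions live in an #included file.
// Marking both the in-class declaration and the out-of-class definition inline
// keeps the linker happy if two .cpp files pull this file in.
class ExampleDsp {
public:
    inline void clear_state();
    inline void compute(int count, float *input0, float *output0);
private:
    float state = 0.0f;
};

inline void ExampleDsp::clear_state() {
    state = 0.0f;
}

inline void ExampleDsp::compute(int count, float *input0, float *output0) {
    for (int i = 0; i < count; ++i) {
        state = 0.99f * state + 0.01f * input0[i];   // trivial placeholder smoothing
        output0[i] = state;
    }
}
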
Ratatouille/Ratatouille.c (13 changes: 10 additions & 3 deletions)
@@ -203,9 +203,16 @@ static void file_load_response(void *w_, void* user_data) {
m->filename = strdup(*(const char**)user_data);
LV2_URID urid;
if ((strcmp(m->filename, "None") == 0)) {
if (old) urid = ps->uris.neural_model;
else if (old1) urid = ps->uris.conv_ir_file;
else urid = ps->uris.rtneural_model;
if (old) {
if ( m == &ps->ma) urid = ps->uris.neural_model;
else urid = ps->uris.neural_model1;
} else if (old1) {
if ( m == &ps->ir) urid = ps->uris.conv_ir_file;
else urid = ps->uris.conv_ir_file1;
} else {
if ( m == &ps->ma) urid = ps->uris.rtneural_model;
else urid = ps->uris.rtneural_model1;
}
} else if (ends_with(m->filename, "nam")) {
if ( m == &ps->ma) urid = ps->uris.neural_model;
else urid = ps->uris.neural_model1;
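
The Ratatouille.c hunk extends the "None" (unload) branch so the URID matches the slot that opened the file dialog: model A versus model B, and the first versus second IR file, mirroring what the existing ends_with(m->filename, "nam") branch already does. The ends_with() helper itself is not part of the diff; a minimal sketch of such a suffix check, assuming the usual strlen/strcmp approach (the name ends_with_sketch is illustrative), might look like this:

#include <string.h>

// Hypothetical suffix check; the real ends_with() in Ratatouille.c may differ.
static int ends_with_sketch(const char *name, const char *suffix) {
    if (!name || !suffix) return 0;
    size_t nlen = strlen(name);
    size_t slen = strlen(suffix);
    if (slen > nlen) return 0;
    return strcmp(name + (nlen - slen), suffix) == 0;
}
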
Ratatouille/Ratatouille.cpp (80 changes: 76 additions & 4 deletions)
@@ -13,6 +13,7 @@
#include <cmath>
#include <iostream>
#include <cstring>
#include <thread>
#include <unistd.h>

#include "resampler.cc"
@@ -57,10 +58,31 @@ using std::max;
#include "gx_convolver.cc"
#include "gx_convolver.h"

////////////////////////////// PLUG-IN CLASS ///////////////////////////

namespace ratatouille {

class Xratatouille;

///////////////////////// INTERNAL WORKER CLASS //////////////////////

class XratatouilleWorker {
private:
std::atomic<bool> _execute;
std::thread _thd;
std::mutex m;

public:
XratatouilleWorker();
~XratatouilleWorker();
void stop();
void start(Xratatouille *xr);
std::atomic<bool> is_done;
bool is_running() const noexcept;
std::condition_variable cv;
};

////////////////////////////// PLUG-IN CLASS ///////////////////////////

class Xratatouille
{
private:
@@ -71,6 +93,7 @@ class Xratatouille
GxConvolver conv;
gx_resample::StreamingResampler resamp1;
GxConvolver conv1;
XratatouilleWorker xrworker;

int32_t rt_prio;
int32_t rt_policy;
@@ -141,6 +164,7 @@ class Xratatouille
inline void do_work_mono();
inline void deactivate_f();
public:
inline void do_non_rt_work(Xratatouille* xr) {return xr->do_work_mono();};
inline void map_uris(LV2_URID_Map* map);
inline LV2_Atom* write_set_file(LV2_Atom_Forge* forge,
const LV2_URID xlv2_model, const char* filename);
@@ -193,7 +217,7 @@ Xratatouille::Xratatouille() :
input0(NULL),
output0(NULL),
_blend(0),
_mix(0) {};
_mix(0) {xrworker.start(this);};

// destructor
Xratatouille::~Xratatouille() {
@@ -202,8 +226,54 @@ Xratatouille::~Xratatouille() {
conv.cleanup();
conv1.stop_process();
conv1.cleanup();
xrworker.stop();
};

///////////////////////// INTERNAL WORKER CLASS //////////////////////

XratatouilleWorker::XratatouilleWorker()
: _execute(false),
is_done(false) {
}

XratatouilleWorker::~XratatouilleWorker() {
if( _execute.load(std::memory_order_acquire) ) {
stop();
};
}

void XratatouilleWorker::stop() {
_execute.store(false, std::memory_order_release);
if (_thd.joinable()) {
cv.notify_one();
_thd.join();
}
}

void XratatouilleWorker::start(Xratatouille *xr) {
if( _execute.load(std::memory_order_acquire) ) {
stop();
};
_execute.store(true, std::memory_order_release);
_thd = std::thread([this, xr]() {
while (_execute.load(std::memory_order_acquire)) {
std::unique_lock<std::mutex> lk(m);
// wait for signal from dsp that work is to do
cv.wait(lk);
//do work
if (_execute.load(std::memory_order_acquire)) {
xr->do_non_rt_work(xr);
}
}
// when done
});
}

bool XratatouilleWorker::is_running() const noexcept {
return ( _execute.load(std::memory_order_acquire) &&
_thd.joinable() );
}

///////////////////////// PRIVATE CLASS FUNCTIONS /////////////////////

inline void Xratatouille::map_uris(LV2_URID_Map* map) {
@@ -602,7 +672,8 @@ void Xratatouille::run_dsp_(uint32_t n_samples)
if (!_execute.load(std::memory_order_acquire)) {
bufsize = n_samples;
_execute.store(true, std::memory_order_release);
schedule->schedule_work(schedule->handle, sizeof(bool), &doit);
xrworker.cv.notify_one();
//schedule->schedule_work(schedule->handle, sizeof(bool), &doit);
}
}
}
@@ -612,7 +683,8 @@ void Xratatouille::run_dsp_(uint32_t n_samples)
if (!_execute.load(std::memory_order_acquire) && _restore.load(std::memory_order_acquire)) {
_execute.store(true, std::memory_order_release);
bufsize = n_samples;
schedule->schedule_work(schedule->handle, sizeof(bool), &doit);
xrworker.cv.notify_one();
//schedule->schedule_work(schedule->handle, sizeof(bool), &doit);
_restore.store(false, std::memory_order_release);
}

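
The main change in Ratatouille.cpp replaces the host-provided LV2 worker (the schedule->schedule_work() calls, now commented out) with an internal XratatouilleWorker thread: the constructor starts it, the destructor stops it, and run_dsp_() wakes it with xrworker.cv.notify_one() whenever non-real-time work such as model or IR loading is pending. Below is a minimal sketch of that pattern with illustrative names, not the committed class. Unlike the commit, it waits on an explicit pending flag, so a notification that arrives while the worker is not yet blocked in wait() is not lost; the committed bare cv.wait(lk) accepts spurious wakeups and that race in exchange for a simpler, lock-free notify from the audio thread.

#include <atomic>
#include <condition_variable>
#include <mutex>
#include <thread>

// Sketch only: a worker thread woken by the real-time thread to run heavy,
// non-real-time work (e.g. loading neural models or impulse responses).
class WorkerSketch {
public:
    ~WorkerSketch() { stop(); }

    void start() {
        running.store(true, std::memory_order_release);
        thd = std::thread([this] {
            std::unique_lock<std::mutex> lk(m);
            while (running.load(std::memory_order_acquire)) {
                // The predicate guards against spurious wakeups and against a
                // notify that fires before the thread reaches wait().
                cv.wait(lk, [this] {
                    return pending || !running.load(std::memory_order_acquire);
                });
                if (!running.load(std::memory_order_acquire)) break;
                pending = false;
                lk.unlock();
                do_non_rt_work();   // heavy work runs outside the lock
                lk.lock();
            }
        });
    }

    // Called from the audio thread. Taking the mutex here is the textbook
    // form; the committed code calls cv.notify_one() without any lock to keep
    // the real-time path non-blocking, at the cost of a possible missed wakeup.
    void schedule() {
        {
            std::lock_guard<std::mutex> lg(m);
            pending = true;
        }
        cv.notify_one();
    }

    void stop() {
        {
            std::lock_guard<std::mutex> lg(m);
            running.store(false, std::memory_order_release);
        }
        cv.notify_one();
        if (thd.joinable()) thd.join();
    }

private:
    void do_non_rt_work() { /* placeholder for the non-real-time work */ }

    std::thread thd;
    std::mutex m;
    std::condition_variable cv;
    bool pending = false;
    std::atomic<bool> running{false};
};
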
Ratatouille/RtNeuralMulti.cc (8 changes: 4 additions & 4 deletions)
@@ -45,10 +45,10 @@ class RtNeuralMulti {
std::string load_afile;
std::string load_bfile;

void clear_state_f();
void init(unsigned int sample_rate);
inline void clear_state_f();
inline void init(unsigned int sample_rate);
void connect(uint32_t port,void* data);
void compute(int count, float *input0, float *output0);
inline void compute(int count, float *input0, float *output0);
void get_samplerate(std::string config_file, int *mSampleRate);
bool load_json_afile();
bool load_json_bfile();
@@ -108,7 +108,7 @@ void RtNeuralMulti::connect(uint32_t port,void* data)
}
}

void RtNeuralMulti::compute(int count, float *input0, float *output0)
inline void RtNeuralMulti::compute(int count, float *input0, float *output0)
{
if (!modela && !modelb) return;
if (output0 != input0)
