
/*
==============================================================================
This file contains the basic framework code for a JUCE plugin processor.
==============================================================================
*/
#include "PluginProcessor.h"
#include "PluginEditor.h"
// ==================== Parameter-Layout ====================
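// The layout below exposes: a test-noise generator (enable + level), a low band
// (freq/gain/Q with selectable mode Off/Cut/Shelf/Bell and a 12-48 dB/oct cut slope),
// three peak bands (freq/gain/Q/bypass), a matching high band, input/output gain,
// the Crystalize switch and a master bypass.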
juce::AudioProcessorValueTreeState::ParameterLayout
CrystalizerEQAudioProcessor::createParameterLayout() {
std::vector<std::unique_ptr<juce::RangedAudioParameter>> params;
params.push_back (std::make_unique<juce::AudioParameterBool>(
"TestNoiseEnabled", "Test Noise Enabled", false));
params.push_back (std::make_unique<juce::AudioParameterFloat>("TestNoiseLevel", "Test Noise Level", juce::NormalisableRange<float>(-60.f, 0.f, 0.1f), -18.f));
//LOW-BAND
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"LowBandFreq", "LowBand Freq",
juce::NormalisableRange<float>(20.f, 20000.f, 1.f, 0.5f), 30.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>("LowBandGain", "LowBand Gain", juce::NormalisableRange<float>(-24.f, 24.f, 0.1f), 0.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>("LowBandQ", "LowBand Q", juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));
params.push_back (std::make_unique<juce::AudioParameterChoice>(
"LowBandSlope", "LowBand Slope", // display name
juce::StringArray { "12", "24", "36", "48" }, 1));
params.push_back (std::make_unique<juce::AudioParameterChoice>(
"LowBandModes", // parameter ID
"Low Band Modes", // display name
juce::StringArray { "Off", "Cut", "Shelf", "Bell" }, // entries
2));
//PEAK 1
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak1Freq", "Peak1 Freq",
juce::NormalisableRange<float>(100.f, 1500.f, 1.f, 0.5f), 250.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak1Gain", "Peak1 Gain (dB)",
juce::NormalisableRange<float>(-24.f, 24.f, 0.1f), 0.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak1Q", "Peak1 Q",
juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));
params.push_back (std::make_unique<juce::AudioParameterBool>("Peak1Bypass", "Peak1 Bypass", false));
//PEAK 2
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak2Freq", "Peak2 Freq",
juce::NormalisableRange<float>(400.f, 6000.f, 1.f, 0.5f), 1500.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak2Gain", "Peak2 Gain (dB)",
juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak2Q", "Peak2 Q",
juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));
params.push_back (std::make_unique<juce::AudioParameterBool>("Peak2Bypass", "Peak2 Bypass", false));
//PEAK 3
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak3Freq", "Peak3 Freq",
juce::NormalisableRange<float>(1000.f, 16000.f, 1.f, 0.5f), 6000.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak3Gain", "Peak3 Gain (dB)",
juce::NormalisableRange<float>(-24.f, 24.f, 0.1f), 0.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"Peak3Q", "Peak3 Q",
juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));
params.push_back (std::make_unique<juce::AudioParameterBool>("Peak3Bypass", "Peak3 Bypass", false));
//HIGH BAND
params.push_back (std::make_unique<juce::AudioParameterFloat>(
"HighBandFreq", "HighBand Freq",
juce::NormalisableRange<float>(20.f, 20000.f, 1.f, 0.5f), 17000.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>("HighBandGain", "HighBand Gain", juce::NormalisableRange<float>(-24.f, 24.f, 0.1f), 0.f));
params.push_back (std::make_unique<juce::AudioParameterFloat>("HighBandQ", "HighBand Q", juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));
params.push_back (std::make_unique<juce::AudioParameterChoice>(
"HighBandSlope", "HighBand Slope", // display name
juce::StringArray { "12", "24", "36", "48" }, 1));
params.push_back (std::make_unique<juce::AudioParameterChoice>(
"HighBandModes", // parameter ID
"High Band Modes", // display name
juce::StringArray { "Off", "Cut", "Shelf", "Bell" }, // entries
2));
//
params.push_back (std::make_unique<juce::AudioParameterFloat>("InputGain", "Input Gain", juce::NormalisableRange<float>(-30.f, 30.0f, 0.1f), 0.0f));
params.push_back (std::make_unique<juce::AudioParameterFloat>("OutputGain", "Output Gain", juce::NormalisableRange<float>(-30.f, 30.0f, 0.1f), 0.0f));
params.push_back (std::make_unique<juce::AudioParameterBool>("CrystalizeButton", "Crystalize Button", false));
params.push_back (std::make_unique<juce::AudioParameterBool>("MasterBypass", "MasterBypass", false));
return { params.begin(), params.end() };
}
//==============================================================================
CrystalizerEQAudioProcessor::CrystalizerEQAudioProcessor()
#ifndef JucePlugin_PreferredChannelConfigurations
: AudioProcessor (BusesProperties()
#if ! JucePlugin_IsMidiEffect
#if ! JucePlugin_IsSynth
.withInput ("Input", juce::AudioChannelSet::stereo(), true)
#endif
.withOutput ("Output", juce::AudioChannelSet::stereo(), true)
#endif
)
#endif
{
}
CrystalizerEQAudioProcessor::~CrystalizerEQAudioProcessor() = default;
//==============================================================================
const juce::String CrystalizerEQAudioProcessor::getName() const
{
return JucePlugin_Name;
}
bool CrystalizerEQAudioProcessor::acceptsMidi() const
{
#if JucePlugin_WantsMidiInput
return true;
#else
return false;
#endif
}
bool CrystalizerEQAudioProcessor::producesMidi() const
{
#if JucePlugin_ProducesMidiOutput
return true;
#else
return false;
#endif
}
bool CrystalizerEQAudioProcessor::isMidiEffect() const
{
#if JucePlugin_IsMidiEffect
return true;
#else
return false;
#endif
}
double CrystalizerEQAudioProcessor::getTailLengthSeconds() const
{
return 0.0;
}
int CrystalizerEQAudioProcessor::getNumPrograms()
{
return 1; // NB: some hosts don't cope very well if you tell them there are 0 programs,
// so this should be at least 1, even if you're not really implementing programs.
}
int CrystalizerEQAudioProcessor::getCurrentProgram()
{
return 0;
}
void CrystalizerEQAudioProcessor::setCurrentProgram (int index)
{
}
const juce::String CrystalizerEQAudioProcessor::getProgramName (int index)
{
return {};
}
void CrystalizerEQAudioProcessor::changeProgramName (int index, const juce::String& newName)
{
}
AudioFIFO::AudioFIFO() = default;
AudioFIFO::~AudioFIFO() = default;
//==============================================================================
void CrystalizerEQAudioProcessor::prepareToPlay (double sampleRate, int samplesPerBlock)
{
// Use this method as the place to do any pre-playback
// initialisation that you need..
juce::dsp::ProcessSpec spec;
spec.sampleRate = sampleRate;
spec.maximumBlockSize = static_cast<juce::uint32> (samplesPerBlock);
spec.numChannels = static_cast<juce::uint32> (getTotalNumOutputChannels());
mbLowpass.prepare(spec);
mbHighpass.prepare(spec);
mbLowpass.reset();
mbHighpass.reset();
mbHighPeak.prepare(spec);
mbHighPeak.reset();
saturator.prepare(spec);
saturator.functionToUse = [] (float x) {
return std::tanh(x);
};
saturator.reset();
leftChain.prepare (spec);
rightChain.prepare (spec);
leftChain.reset();
rightChain.reset();
updateFilters(); // initial coefficients
}
static void setCoeffs(juce::dsp::IIR::Filter<float>& f,
juce::dsp::IIR::Coefficients<float>::Ptr c)
{
f.coefficients = c; // swap in the new coefficients
// alternatively: *f.coefficients = *c;
}
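// Chain index layout, as inferred from the get<N>() calls below (the chain itself
// is declared in PluginProcessor.h):
//   0       input gain
//   1..4    low-band filter stages (stage 1 carries the Cut/Shelf/Bell coefficients;
//           stages 2..4 add 12 dB/oct each for the steeper Cut slopes)
//   5..7    peak bands 1..3
//   8..11   high-band filter stages (same scheme as the low band)
//   12..13  "Crystalize" high shelf (+4 dB @ 12 kHz) and bell (+2 dB @ 10 kHz)
//   14      output gain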
void CrystalizerEQAudioProcessor::updateFilters()
{
const auto sr = getSampleRate();
const auto lowFreq = apvts.getRawParameterValue("LowBandFreq")->load();
const auto lowSlope = static_cast<int>(apvts.getRawParameterValue("LowBandSlope")->load());
const auto lowGdB = apvts.getRawParameterValue("LowBandGain")->load(); // used by the Shelf/Bell modes
const auto lowQ = apvts.getRawParameterValue("LowBandQ")->load();
const int lowBandModes =
static_cast<int>(apvts.getRawParameterValue("LowBandModes")->load());
const auto peak1F = apvts.getRawParameterValue("Peak1Freq")->load();
const auto peak1GdB = apvts.getRawParameterValue("Peak1Gain")->load();
const auto peak1Q = apvts.getRawParameterValue("Peak1Q")->load();
const auto peak1Bypass = apvts.getRawParameterValue("Peak1Bypass")->load();
const auto peak2F = apvts.getRawParameterValue("Peak2Freq")->load();
const auto peak2GdB = apvts.getRawParameterValue("Peak2Gain")->load();
const auto peak2Q = apvts.getRawParameterValue("Peak2Q")->load();
const auto peak2Bypass = apvts.getRawParameterValue("Peak2Bypass")->load();
const auto peak3F = apvts.getRawParameterValue("Peak3Freq")->load();
const auto peak3GdB = apvts.getRawParameterValue("Peak3Gain")->load();
const auto peak3Q = apvts.getRawParameterValue("Peak3Q")->load();
const auto peak3Bypass = apvts.getRawParameterValue("Peak3Bypass")->load();
const auto highFreq = apvts.getRawParameterValue("HighBandFreq")->load();
const auto highSlope = static_cast<int>(apvts.getRawParameterValue("HighBandSlope")->load());
const auto highGdB = apvts.getRawParameterValue("HighBandGain")->load(); // used by the Shelf/Bell modes
const auto highQ = apvts.getRawParameterValue("HighBandQ")->load();
const int highBandModes =
static_cast<int>(apvts.getRawParameterValue("HighBandModes")->load());
const auto inputGdB = apvts.getRawParameterValue("InputGain")->load();
const auto outputGdB = apvts.getRawParameterValue("OutputGain")->load();
auto peak1 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak1F, peak1Q,
juce::Decibels::decibelsToGain(peak1GdB));
auto peak2 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak2F, peak2Q,
juce::Decibels::decibelsToGain(peak2GdB));
auto peak3 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak3F, peak3Q,
juce::Decibels::decibelsToGain(peak3GdB));
const auto crystalized = static_cast<int>(apvts.getRawParameterValue("CrystalizeButton")->load());
juce::dsp::IIR::Coefficients<float>::Ptr lowBand;
juce::dsp::IIR::Coefficients<float>::Ptr highBand;
// input gain (applied to both chains)
leftChain.get<0>().setGainDecibels (inputGdB);
rightChain.get<0>().setGainDecibels (inputGdB);
setCoeffs (leftChain.get<5>(), peak1);
setCoeffs (rightChain.get<5>(), peak1);
setCoeffs (leftChain.get<6>(), peak2);
setCoeffs (rightChain.get<6>(), peak2);
setCoeffs (leftChain.get<7>(), peak3);
setCoeffs (rightChain.get<7>(), peak3);
const auto crystalizedShelf = juce::dsp::IIR::Coefficients<float>::makeHighShelf(sr, 12000.0f, 1.0f, juce::Decibels::decibelsToGain(4.0f));
setCoeffs(leftChain.get<12>(), crystalizedShelf);
setCoeffs(rightChain.get<12>(), crystalizedShelf);
const auto crystalizedBell = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, 10000.0f, 1.0f, juce::Decibels::decibelsToGain(2.0f));
setCoeffs(leftChain.get<13>(), crystalizedBell);
setCoeffs(rightChain.get<13>(), crystalizedBell);
leftChain.get<14>().setGainDecibels (outputGdB);
rightChain.get<14>().setGainDecibels (outputGdB);
if (peak1Bypass) {
leftChain.setBypassed<5>(true);
rightChain.setBypassed<5>(true);
} else {
leftChain.setBypassed<5>(false);
rightChain.setBypassed<5>(false);
}
if (peak2Bypass) {
leftChain.setBypassed<6>(true);
rightChain.setBypassed<6>(true);
} else {
leftChain.setBypassed<6>(false);
rightChain.setBypassed<6>(false);
}
if (peak3Bypass) {
leftChain.setBypassed<7>(true);
rightChain.setBypassed<7>(true);
} else {
leftChain.setBypassed<7>(false);
rightChain.setBypassed<7>(false);
}
const float lowGainLin = juce::Decibels::decibelsToGain(lowGdB);
switch (lowBandModes) {
case 0:
leftChain.setBypassed<1>(true);
rightChain.setBypassed<1>(true);
leftChain.setBypassed<2>(true);
rightChain.setBypassed<2>(true);
leftChain.setBypassed<3>(true);
rightChain.setBypassed<3>(true);
leftChain.setBypassed<4>(true);
rightChain.setBypassed<4>(true);
break;
case 1: {
const auto q = lowQ;
lowBand = juce::dsp::IIR::Coefficients<float>::makeHighPass(sr, lowFreq, q);
setCoeffs(leftChain .get<1>(), lowBand);
setCoeffs(rightChain.get<1>(), lowBand);
leftChain .setBypassed<1>(false);
rightChain.setBypassed<1>(false);
setCoeffs (leftChain.get<2>(), lowBand);
setCoeffs(rightChain.get<2>(), lowBand);
setCoeffs (leftChain.get<3>(), lowBand);
setCoeffs(rightChain.get<3>(), lowBand);
setCoeffs (leftChain.get<4>(), lowBand);
setCoeffs(rightChain.get<4>(), lowBand);
// slope: each extra stage adds 12 dB/oct (choice index 0..3 -> 1..4 active stages)
leftChain .setBypassed<2>(lowSlope < 1);
rightChain.setBypassed<2>(lowSlope < 1);
leftChain .setBypassed<3>(lowSlope < 2);
rightChain.setBypassed<3>(lowSlope < 2);
leftChain .setBypassed<4>(lowSlope < 3);
rightChain.setBypassed<4>(lowSlope < 3);
break;
}
case 2:
// Q controls the steepness of the shelf transition
lowBand = juce::dsp::IIR::Coefficients<float>::makeLowShelf(sr, lowFreq, lowQ, lowGainLin);
setCoeffs(leftChain .get<1>(), lowBand);
setCoeffs(rightChain.get<1>(), lowBand);
leftChain .setBypassed<1>(false);
rightChain.setBypassed<1>(false);
break;
case 3: // Bell (optional: use the low band as a bell filter)
lowBand = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, lowFreq, lowQ, lowGainLin);
setCoeffs(leftChain .get<1>(), lowBand);
setCoeffs(rightChain.get<1>(), lowBand);
leftChain .setBypassed<1>(false);
rightChain.setBypassed<1>(false);
break;
}
//HIGH-BAND
const float highGainLin = juce::Decibels::decibelsToGain(highGdB);
switch (highBandModes) {
case 0:
leftChain.setBypassed<8>(true);
rightChain.setBypassed<8>(true);
leftChain.setBypassed<9>(true);
rightChain.setBypassed<9>(true);
leftChain.setBypassed<10>(true);
rightChain.setBypassed<10>(true);
leftChain.setBypassed<11>(true);
rightChain.setBypassed<11>(true);
break;
case 1: {
const auto q = highQ;
highBand = juce::dsp::IIR::Coefficients<float>::makeLowPass(sr, highFreq, q);
setCoeffs(leftChain .get<8>(), highBand);
setCoeffs(rightChain.get<8>(), highBand);
leftChain .setBypassed<8>(false);
rightChain.setBypassed<8>(false);
setCoeffs (leftChain.get<9>(), highBand);
setCoeffs(rightChain.get<9>(), highBand);
setCoeffs (leftChain.get<10>(), highBand);
setCoeffs(rightChain.get<10>(), highBand);
setCoeffs (leftChain.get<11>(), highBand);
setCoeffs(rightChain.get<11>(), highBand);
// slope: each extra stage adds 12 dB/oct (choice index 0..3 -> 1..4 active stages)
leftChain .setBypassed<9>(highSlope < 1);
rightChain.setBypassed<9>(highSlope < 1);
leftChain .setBypassed<10>(highSlope < 2);
rightChain.setBypassed<10>(highSlope < 2);
leftChain .setBypassed<11>(highSlope < 3);
rightChain.setBypassed<11>(highSlope < 3);
break;
}
case 2:
// Q controls the steepness of the shelf transition
highBand = juce::dsp::IIR::Coefficients<float>::makeHighShelf(sr, highFreq, highQ, highGainLin);
setCoeffs(leftChain .get<8>(), highBand);
setCoeffs(rightChain.get<8>(), highBand);
leftChain .setBypassed<8>(false);
rightChain.setBypassed<8>(false);
break;
case 3: // Bell (optional: use the high band as a bell filter)
highBand = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, highFreq, highQ, highGainLin);
setCoeffs(leftChain .get<8>(), highBand);
setCoeffs(rightChain.get<8>(), highBand);
leftChain .setBypassed<8>(false);
rightChain.setBypassed<8>(false);
break;
}
if (!crystalized) {
leftChain .setBypassed<12>(true);
rightChain.setBypassed<12>(true);
leftChain .setBypassed<13>(true);
rightChain.setBypassed<13>(true);
} else {
leftChain .setBypassed<12>(false);
rightChain.setBypassed<12>(false);
leftChain .setBypassed<13>(false);
rightChain.setBypassed<13>(false);
}
}
juce::String CrystalizerEQAudioProcessor::savePresetToFile() const {
const auto nameInput = getPresetName();
auto desktop = juce::File::getSpecialLocation(juce::File::userDesktopDirectory);
// create the preset subfolder on the desktop
auto presetFolder = desktop.getChildFile("CrystalizerEQ_Presets");
presetFolder.createDirectory();
// prepare the preset file
auto file = presetFolder.getNonexistentChildFile(nameInput, ".xml");
juce::ValueTree preset ("Preset");
preset.setProperty("name", nameInput, nullptr);
for (auto* p : getParameters()) {
if (p == nullptr) continue;
if (auto* ranged = dynamic_cast<juce::RangedAudioParameter*>(p))
{
if (ranged->getParameterID() == "MasterBypass")
{continue;}
juce::ValueTree param ("Param");
param.setProperty("id", ranged->getParameterID(), nullptr);
param.setProperty("value", ranged->getValue(), nullptr);
preset.addChild(param, -1, nullptr);
}
}
std::unique_ptr<juce::XmlElement> xml (preset.createXml());
xml->writeToFile(file, {});
return file.getFileNameWithoutExtension();
}
void CrystalizerEQAudioProcessor::loadPreset(const juce::String &preset){
auto desktop = juce::File::getSpecialLocation(juce::File::userDesktopDirectory);
auto presetFolder = desktop.getChildFile("CrystalizerEQ_Presets");
juce::Array<juce::File> files = presetFolder.findChildFiles(
juce::File::findFiles, // files only (not directories)
false, // not recursive (do not search subfolders)
"*.xml" // pattern: XML files only
);
for (const auto& f : files) {
if (f.getFileName() != preset) {
continue;
}
std::unique_ptr<juce::XmlElement> xml (juce::XmlDocument::parse(f));
if (xml == nullptr)
return;
for (auto* p : getParameters()) {
if (p == nullptr) continue;
if (auto* ranged = dynamic_cast<juce::RangedAudioParameter*>(p)) {
if (auto* child = xml->getFirstChildElement())
{
while (child != nullptr)
{
juce::String id = child->getStringAttribute("id");
if (id == ranged->getParameterID()) {
float value = (float) child->getDoubleAttribute("value");
ranged->beginChangeGesture();
ranged->setValueNotifyingHost(value);
ranged->endChangeGesture();
break;
}
child = child->getNextElement();
}
}
}
}
}
}
void CrystalizerEQAudioProcessor::deletePreset(const juce::String &preset) const{
auto desktop = juce::File::getSpecialLocation(juce::File::userDesktopDirectory);
// locate the preset subfolder on the desktop
auto presetFolder = desktop.getChildFile("CrystalizerEQ_Presets");
juce::Array<juce::File> files = presetFolder.findChildFiles(
juce::File::findFiles, // files only (not directories)
false, // not recursive (do not search subfolders)
"*.xml" // pattern: XML files only
);
for (const auto& f : files) {
if (f.getFileName() != preset) {
continue;
}
f.deleteFile();
}
}
void CrystalizerEQAudioProcessor::resetAllParameters() const{
for (auto* p : getParameters()) {
if (p == nullptr) continue;
if (auto* ranged = dynamic_cast<juce::RangedAudioParameter*>(p))
{
const float def = ranged->getDefaultValue(); // normalised [0..1]
ranged->beginChangeGesture();
ranged->setValueNotifyingHost(def);
ranged->endChangeGesture();
}
}
}
void CrystalizerEQAudioProcessor::parameterChanged (const juce::String& id, float v)
{
}
juce::StringArray CrystalizerEQAudioProcessor::getPresetNamesArray() const{
juce::StringArray presetNames = {"Init"};
auto desktop = juce::File::getSpecialLocation(juce::File::userDesktopDirectory);
// locate the preset subfolder on the desktop
auto presetFolder = desktop.getChildFile("CrystalizerEQ_Presets");
juce::Array<juce::File> files = presetFolder.findChildFiles(
juce::File::findFiles, // files only (not directories)
false, // not recursive (do not search subfolders)
"*.xml" // pattern: XML files only
);
for (const auto& f : files) {
const auto presetName = f.getFileNameWithoutExtension();
presetNames.add(presetName);
}
return presetNames;
}
void CrystalizerEQAudioProcessor::releaseResources()
{
// When playback stops, you can use this as an opportunity to free up any
// spare memory, etc.
}
#ifndef JucePlugin_PreferredChannelConfigurations
bool CrystalizerEQAudioProcessor::isBusesLayoutSupported (const BusesLayout& layouts) const
{
#if JucePlugin_IsMidiEffect
juce::ignoreUnused (layouts);
return true;
#else
// This is the place where you check if the layout is supported.
// In this template code we only support mono or stereo.
// Some plugin hosts, such as certain GarageBand versions, will only
// load plugins that support stereo bus layouts.
if (layouts.getMainOutputChannelSet() != juce::AudioChannelSet::mono()
&& layouts.getMainOutputChannelSet() != juce::AudioChannelSet::stereo())
return false;
// This checks if the input layout matches the output layout
#if ! JucePlugin_IsSynth
if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
return false;
#endif
return true;
#endif
}
#endif
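// processBlock flow: optionally replace the input with test noise, early-out on
// master bypass, run the multiband "Crystalize" pre-stage when enabled, refresh
// all filter coefficients from the APVTS, then run the left/right processor
// chains and push the result into the FIFO for the editor.
// Note: getSingleChannelBlock(1) below assumes a stereo buffer.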
void CrystalizerEQAudioProcessor::processBlock (juce::AudioBuffer<float>& buffer, juce::MidiBuffer& midiMessages)
{
juce::ignoreUnused (midiMessages);
juce::ScopedNoDenormals noDenormals;
auto totalNumInputChannels = getTotalNumInputChannels();
auto totalNumOutputChannels = getTotalNumOutputChannels();
// In case we have more outputs than inputs, this code clears any output
// channels that didn't contain input data, (because these aren't
// guaranteed to be empty - they may contain garbage).
// This is here to avoid people getting screaming feedback
// when they first compile a plugin, but obviously you don't need to keep
// this code if your algorithm always overwrites all the output channels.
for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i)
buffer.clear (i, 0, buffer.getNumSamples());
juce::AudioBuffer<float> lowBuf, highBuf;
// This is the place where you'd normally do the guts of your plugin's
// audio processing...
// Make sure to reset the state if your inner loop is processing
// the samples and the outer loop is handling the channels.
// Alternatively, you can process the samples with the channels
// interleaved by keeping the same state.
// (simple approach) read the parameters once per block and update the filters
const bool testNoiseOn = apvts.getRawParameterValue("TestNoiseEnabled")->load() > 0.5f;
if (testNoiseOn)
{
buffer.clear();
const float noiseLevelDb = apvts.getRawParameterValue("TestNoiseLevel")->load();
const float gain = juce::Decibels::decibelsToGain (noiseLevelDb);
const int numSamples = buffer.getNumSamples();
const int numChannels = buffer.getNumChannels();
for (int ch = 0; ch < numChannels; ++ch)
{
auto* write = buffer.getWritePointer (ch);
auto& rng = (ch == 0 ? noiseRandL : noiseRandR);
for (int n = 0; n < numSamples; ++n)
{
// rng.nextFloat() is in [0,1); map it to [-1,1)
const float white = 2.0f * rng.nextFloat() - 1.0f;
write[n] += white * gain; // test noise replaces the host input (buffer was cleared above)
}
}
}
const bool masterBypassed = apvts.getRawParameterValue("MasterBypass")->load() > 0.5f;
if (masterBypassed)
return;
const auto crystalized = static_cast<int>(apvts.getRawParameterValue("CrystalizeButton")->load());
if (crystalized) {
lowBuf.makeCopyOf(buffer, true);
highBuf.makeCopyOf(buffer, true);
buffer.makeCopyOf(processMultiBand(lowBuf, highBuf), true);
}
updateFilters();
juce::dsp::AudioBlock<float> block (buffer);
auto leftBlock = block.getSingleChannelBlock (0);
auto rightBlock = block.getSingleChannelBlock (1);
juce::dsp::ProcessContextReplacing<float> leftCtx (leftBlock);
juce::dsp::ProcessContextReplacing<float> rightCtx(rightBlock);
leftChain.process (leftCtx);
rightChain.process (rightCtx);
audioFIFO.loadSamplesToFIFO(buffer);
}
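// "Crystalize" multiband pre-stage: splits the signal at a fixed 10 kHz crossover
// with Linkwitz-Riley low/high filters, adds white noise to the high band scaled
// by its own RMS level, runs the high band through a tanh waveshaper, and sums
// both bands back together.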
juce::AudioBuffer<float> CrystalizerEQAudioProcessor::processMultiBand(juce::AudioBuffer<float>& lowBuf, juce::AudioBuffer<float>& highBuf) {
const auto sr = getSampleRate();
// crossover frequency, clamped below Nyquist
float fc = 10000.0f;
fc = juce::jlimit(20.0f, 0.49f * (float) sr, fc);
mbLowpass.setType(juce::dsp::LinkwitzRileyFilterType::lowpass);
mbLowpass.setCutoffFrequency(fc);
mbHighpass.setType(juce::dsp::LinkwitzRileyFilterType::highpass);
mbHighpass.setCutoffFrequency(fc);
{
juce::dsp::AudioBlock<float> lowBlock(lowBuf);
auto leftLowBlock = lowBlock.getSingleChannelBlock (0);
auto rightLowBlock = lowBlock.getSingleChannelBlock (1);
juce::dsp::ProcessContextReplacing<float> leftLowCtx(leftLowBlock);
juce::dsp::ProcessContextReplacing<float> rightLowCtx(rightLowBlock);
mbLowpass.process(leftLowCtx);
mbLowpass.process(rightLowCtx);
}
{
//HIGH-BAND PROCESSING
juce::dsp::AudioBlock<float> highBlock(highBuf);
auto leftHighBlock = highBlock.getSingleChannelBlock (0);
auto rightHighBlock = highBlock.getSingleChannelBlock (1);
juce::dsp::ProcessContextReplacing<float> leftHighCtx(leftHighBlock);
juce::dsp::ProcessContextReplacing<float> rightHighCtx(rightHighBlock);
mbHighpass.process(leftHighCtx);
mbHighpass.process(rightHighCtx);
//WHITE NOISE ON HIGH-BAND
const int numSamples = highBuf.getNumSamples();
const int numChannels = highBuf.getNumChannels();
const float rms = highBuf.getRMSLevel(0, 0, numSamples); // left channel only (could also average both channels)
const float dyn = juce::jlimit(0.0f, 1.0f, rms * 2.0f); // simple level follower: louder input -> more noise
const float gain = dyn * juce::Decibels::decibelsToGain(0.5f);
for (int ch = 0; ch < numChannels; ++ch) {
auto* write = highBuf.getWritePointer (ch);
auto& rng = (ch == 0 ? noiseRandL : noiseRandR);
for (int n = 0; n < numSamples; ++n)
{
// rng.nextFloat() is in [0,1); map it to [-1,1)
const float white = 2.0f * rng.nextFloat() - 1.0f;
write[n] += white * gain; // add level-dependent white noise on top of the high band
}
}
saturator.process(leftHighCtx);
saturator.process(rightHighCtx);
}
juce::AudioBuffer<float> out;
out.makeCopyOf(lowBuf, true);
const int numCh = out.getNumChannels();
const int numSm = out.getNumSamples();
for (int ch = 0; ch < numCh; ++ch)
out.addFrom(ch, 0, highBuf, ch, 0, numSm, 1.0f);
return out;
}
void CrystalizerEQAudioProcessor::setPresetName (const juce::String& name)
{
presetName = name;
}
//==============================================================================
bool CrystalizerEQAudioProcessor::hasEditor() const
{
return true; // (change this to false if you choose to not supply an editor)
}
juce::AudioProcessorEditor* CrystalizerEQAudioProcessor::createEditor()
{
return new CrystalizerEQAudioProcessorEditor (*this);
}
//==============================================================================
void CrystalizerEQAudioProcessor::getStateInformation (juce::MemoryBlock& destData)
{
// You should use this method to store your parameters in the memory block.
// You could do that either as raw data, or use the XML or ValueTree classes
// as intermediaries to make it easy to save and load complex data.
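// A minimal sketch of the usual APVTS pattern (left commented out here; uses the
// same apvts member as the rest of this file):
//   auto state = apvts.copyState();
//   if (auto xml = state.createXml())
//       copyXmlToBinary (*xml, destData);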
}
void CrystalizerEQAudioProcessor::setStateInformation (const void* data, int sizeInBytes)
{
// You should use this method to restore your parameters from this memory block,
// whose contents will have been created by the getStateInformation() call.
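// Matching restore sketch (commented out; assumes the state was written with the
// copyState()/copyXmlToBinary() pattern sketched above):
//   if (auto xml = getXmlFromBinary (data, sizeInBytes))
//       if (xml->hasTagName (apvts.state.getType()))
//           apvts.replaceState (juce::ValueTree::fromXml (*xml));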
}
//==============================================================================
// This creates new instances of the plugin..
juce::AudioProcessor* JUCE_CALLTYPE createPluginFilter()
{
return new CrystalizerEQAudioProcessor();
}
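// AudioFIFO: the audio thread appends the left-channel samples of every processed
// block to a SpinLock-guarded juce::Array; the editor drains and clears it via
// sendSamplesToEditor(). Note that the array grows (and may allocate) on the
// audio thread while it holds the lock.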
void AudioFIFO::loadSamplesToFIFO(const juce::AudioBuffer<float> &samples) {
const int numSamples = samples.getNumSamples();
const float* channelData = samples.getReadPointer(0);
const juce::SpinLock::ScopedLockType guard(lock); // guard shared access against the editor thread
sampleStack.ensureStorageAllocated(sampleStack.size() + numSamples);
for (int i = 0; i < numSamples; ++i)
{
sampleStack.add(channelData[i]);
}
}
juce::Array<float> AudioFIFO::sendSamplesToEditor(){
const juce::SpinLock::ScopedLockType guard(lock);
juce::Array<float> copiedSamples = sampleStack;
sampleStack.clear();
return copiedSamples;
}