/*
  ==============================================================================

    PluginProcessor.cpp

    Core audio processing implementation for CrystalizerEQ.
    Implements a 5-band EQ with shelving/bell/cut modes, multiband saturation,
    and a preset management system.

  ==============================================================================
*/

#include "PluginProcessor.h"
#include "PluginEditor.h"

juce::AudioProcessorValueTreeState::ParameterLayout
CrystalizerEQAudioProcessor::createParameterLayout() {
    std::vector<std::unique_ptr<juce::RangedAudioParameter>> params;

    // LOW BAND
    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "LowBandFreq", "LowBand Freq",
        juce::NormalisableRange<float>(20.f, 20000.f, 1.f, 0.5f), 30.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "LowBandGain", "LowBand Gain",
        juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "LowBandQ", "LowBand Q",
        juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));

    params.push_back(std::make_unique<juce::AudioParameterChoice>(
        "LowBandSlope", "LowBand Slope",                      // display name
        juce::StringArray{"12", "24", "36", "48"}, 1));

    params.push_back(std::make_unique<juce::AudioParameterChoice>(
        "LowBandModes",                                       // parameter ID
        "Low Band Modes",                                     // display name
        juce::StringArray{"Off", "Cut", "Shelf", "Bell"},     // entries
        2));

    // PEAK 1
    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak1Freq", "Peak1 Freq",
        juce::NormalisableRange<float>(100.f, 1500.f, 1.f, 0.5f), 250.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak1Gain", "Peak1 Gain (dB)",
        juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak1Q", "Peak1 Q",
        juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));

    params.push_back(std::make_unique<juce::AudioParameterBool>("Peak1Bypass", "Peak1 Bypass", false));

    // PEAK 2
    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak2Freq", "Peak2 Freq",
        juce::NormalisableRange<float>(400.f, 6000.f, 1.f, 0.5f), 1500.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak2Gain", "Peak2 Gain (dB)",
        juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak2Q", "Peak2 Q",
        juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));

    params.push_back(std::make_unique<juce::AudioParameterBool>("Peak2Bypass", "Peak2 Bypass", false));

    // PEAK 3
    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak3Freq", "Peak3 Freq",
        juce::NormalisableRange<float>(1000.f, 16000.f, 1.f, 0.5f), 6000.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak3Gain", "Peak3 Gain (dB)",
        juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "Peak3Q", "Peak3 Q",
        juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));

    params.push_back(std::make_unique<juce::AudioParameterBool>("Peak3Bypass", "Peak3 Bypass", false));

    // HIGH BAND
    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "HighBandFreq", "HighBand Freq",
        juce::NormalisableRange<float>(20.f, 20000.f, 1.f, 0.5f), 17000.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "HighBandGain", "HighBand Gain",
        juce::NormalisableRange<float>(-48.f, 48.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "HighBandQ", "HighBand Q",
        juce::NormalisableRange<float>(0.1f, 10.f, 0.01f), 1.f));

    params.push_back(std::make_unique<juce::AudioParameterChoice>(
        "HighBandSlope", "HighBand Slope",                    // display name
        juce::StringArray{"12", "24", "36", "48"}, 1));

    params.push_back(std::make_unique<juce::AudioParameterChoice>(
        "HighBandModes",                                      // parameter ID
        "High Band Modes",                                    // display name
        juce::StringArray{"Off", "Cut", "Shelf", "Bell"},     // entries
        2));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "InputGain", "Input Gain",
        juce::NormalisableRange<float>(-30.f, 30.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterFloat>(
        "OutputGain", "Output Gain",
        juce::NormalisableRange<float>(-30.f, 30.f, 0.1f), 0.f));

    params.push_back(std::make_unique<juce::AudioParameterBool>("CrystalizeButton", "Crystalize Button", false));

    params.push_back(std::make_unique<juce::AudioParameterBool>("MasterBypass", "Master Bypass", false));

    return {params.begin(), params.end()};
}
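
// The APVTS that owns these parameters is assumed to be declared in
// PluginProcessor.h following the usual JUCE pattern, e.g. (hypothetical
// member initialiser, not part of this file):
//
//     juce::AudioProcessorValueTreeState apvts { *this, nullptr,
//                                                "Parameters",
//                                                createParameterLayout() };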

//==============================================================================
CrystalizerEQAudioProcessor::CrystalizerEQAudioProcessor()
#ifndef JucePlugin_PreferredChannelConfigurations
    : AudioProcessor(BusesProperties()
#if ! JucePlugin_IsMidiEffect
#if ! JucePlugin_IsSynth
                         .withInput("Input", juce::AudioChannelSet::stereo(), true)
#endif
                         .withOutput("Output", juce::AudioChannelSet::stereo(), true)
#endif
      )
#endif
{
}

CrystalizerEQAudioProcessor::~CrystalizerEQAudioProcessor() = default;

//==============================================================================
const juce::String CrystalizerEQAudioProcessor::getName() const {
    return JucePlugin_Name;
}

bool CrystalizerEQAudioProcessor::acceptsMidi() const {
#if JucePlugin_WantsMidiInput
    return true;
#else
    return false;
#endif
}

bool CrystalizerEQAudioProcessor::producesMidi() const {
#if JucePlugin_ProducesMidiOutput
    return true;
#else
    return false;
#endif
}

bool CrystalizerEQAudioProcessor::isMidiEffect() const {
#if JucePlugin_IsMidiEffect
    return true;
#else
    return false;
#endif
}

double CrystalizerEQAudioProcessor::getTailLengthSeconds() const {
    return 0.0;
}

int CrystalizerEQAudioProcessor::getNumPrograms() {
    return 1; // NB: some hosts don't cope very well if you tell them there are 0 programs,
              // so this should be at least 1, even if you're not really implementing programs.
}

int CrystalizerEQAudioProcessor::getCurrentProgram() {
    return 0;
}

void CrystalizerEQAudioProcessor::setCurrentProgram(int index) {
}

const juce::String CrystalizerEQAudioProcessor::getProgramName(int index) {
    return {};
}

void CrystalizerEQAudioProcessor::changeProgramName(int index, const juce::String &newName) {
}

AudioFIFO::AudioFIFO() {
}

AudioFIFO::~AudioFIFO() {
}

//==============================================================================
void CrystalizerEQAudioProcessor::prepareToPlay(double sampleRate, int samplesPerBlock) {
    // Use this method as the place to do any pre-playback
    // initialisation that you need.

    juce::dsp::ProcessSpec spec;
    spec.sampleRate = sampleRate;
    spec.maximumBlockSize = static_cast<juce::uint32>(samplesPerBlock);
    spec.numChannels = static_cast<juce::uint32>(getTotalNumOutputChannels());

    mbLowpassL.prepare(spec);
    mbLowpassR.prepare(spec);

    mbHighpassL.prepare(spec);
    mbHighpassR.prepare(spec);

    mbLowpassL.reset();
    mbLowpassR.reset();

    mbHighpassL.reset();
    mbHighpassR.reset();

    mbHighPeakL.prepare(spec);
    mbHighPeakR.prepare(spec);
    mbHighPeakL.reset();
    mbHighPeakR.reset();

    saturatorL.prepare(spec);
    saturatorL.functionToUse = [](float x) {
        return std::tanh(x);
    };
    saturatorL.reset();

    saturatorR.prepare(spec);
    saturatorR.functionToUse = [](float x) {
        return std::tanh(x);
    };
    saturatorR.reset();

    spec.numChannels = static_cast<juce::uint32>(getTotalNumOutputChannels());
    leftChain.prepare(spec);
    rightChain.prepare(spec);

    leftChain.reset();
    rightChain.reset();

    updateFilters();
}
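
// saturatorL / saturatorR expose functionToUse, so they are assumed to be
// juce::dsp::WaveShaper<float> members declared in PluginProcessor.h; they are
// applied to the high band inside processMultiBand() when Crystalize is active.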
static void setCoeffs(juce::dsp::IIR::Filter<float> &f,
                      juce::dsp::IIR::Coefficients<float>::Ptr c) {
    f.coefficients = c;
}

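// Assumed index layout of leftChain / rightChain (the juce::dsp::ProcessorChain
// declared in PluginProcessor.h), reconstructed from the get<N>() calls below:
//   0        input gain
//   1 - 4    low band (up to four cascaded 12 dB/oct stages in Cut mode,
//            only stage 1 used for Shelf/Bell)
//   5 - 7    Peak 1 / Peak 2 / Peak 3 bell filters
//   8 - 11   high band (mirror of the low band)
//   12 - 13  "Crystalize" high shelf + bell
//   14       output gain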
void CrystalizerEQAudioProcessor::updateFilters() {
    const auto sr = getSampleRate();

    const auto lowFreq = apvts.getRawParameterValue("LowBandFreq")->load();
    const auto lowSlope = static_cast<int>(apvts.getRawParameterValue("LowBandSlope")->load());
    const auto lowGdB = apvts.getRawParameterValue("LowBandGain")->load();
    const auto lowQ = apvts.getRawParameterValue("LowBandQ")->load();
    const int lowBandModes =
            static_cast<int>(apvts.getRawParameterValue("LowBandModes")->load());

    const auto peak1F = apvts.getRawParameterValue("Peak1Freq")->load();
    const auto peak1GdB = apvts.getRawParameterValue("Peak1Gain")->load();
    const auto peak1Q = apvts.getRawParameterValue("Peak1Q")->load();
    const auto peak1Bypass = apvts.getRawParameterValue("Peak1Bypass")->load();

    const auto peak2F = apvts.getRawParameterValue("Peak2Freq")->load();
    const auto peak2GdB = apvts.getRawParameterValue("Peak2Gain")->load();
    const auto peak2Q = apvts.getRawParameterValue("Peak2Q")->load();
    const auto peak2Bypass = apvts.getRawParameterValue("Peak2Bypass")->load();

    const auto peak3F = apvts.getRawParameterValue("Peak3Freq")->load();
    const auto peak3GdB = apvts.getRawParameterValue("Peak3Gain")->load();
    const auto peak3Q = apvts.getRawParameterValue("Peak3Q")->load();
    const auto peak3Bypass = apvts.getRawParameterValue("Peak3Bypass")->load();

    const auto highFreq = apvts.getRawParameterValue("HighBandFreq")->load();
    const auto highSlope = static_cast<int>(apvts.getRawParameterValue("HighBandSlope")->load());
    const auto highGdB = apvts.getRawParameterValue("HighBandGain")->load();
    const auto highQ = apvts.getRawParameterValue("HighBandQ")->load();
    const int highBandModes =
            static_cast<int>(apvts.getRawParameterValue("HighBandModes")->load());

    const auto inputGdB = apvts.getRawParameterValue("InputGain")->load();
    const auto outputGdB = apvts.getRawParameterValue("OutputGain")->load();

    auto peak1 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak1F, peak1Q,
                                                                     juce::Decibels::decibelsToGain(peak1GdB));
    auto peak2 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak2F, peak2Q,
                                                                     juce::Decibels::decibelsToGain(peak2GdB));
    auto peak3 = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, peak3F, peak3Q,
                                                                     juce::Decibels::decibelsToGain(peak3GdB));

    const auto crystalized = static_cast<int>(apvts.getRawParameterValue("CrystalizeButton")->load());

    juce::dsp::IIR::Coefficients<float>::Ptr lowBand;
    juce::dsp::IIR::Coefficients<float>::Ptr highBand;

    leftChain.get<0>().setGainDecibels(inputGdB);
    rightChain.get<0>().setGainDecibels(inputGdB);

    setCoeffs(leftChain.get<5>(), peak1);
    setCoeffs(rightChain.get<5>(), peak1);
    setCoeffs(leftChain.get<6>(), peak2);
    setCoeffs(rightChain.get<6>(), peak2);
    setCoeffs(leftChain.get<7>(), peak3);
    setCoeffs(rightChain.get<7>(), peak3);

    const auto crystalizedShelf = juce::dsp::IIR::Coefficients<float>::makeHighShelf(
        sr, 12000.0f, 1.0f, juce::Decibels::decibelsToGain(4.0f));
    setCoeffs(leftChain.get<12>(), crystalizedShelf);
    setCoeffs(rightChain.get<12>(), crystalizedShelf);

    const auto crystalizedBell = juce::dsp::IIR::Coefficients<float>::makePeakFilter(
        sr, 10000.0f, 1.0f, juce::Decibels::decibelsToGain(2.0f));
    setCoeffs(leftChain.get<13>(), crystalizedBell);
    setCoeffs(rightChain.get<13>(), crystalizedBell);

    leftChain.get<14>().setGainDecibels(outputGdB);
    rightChain.get<14>().setGainDecibels(outputGdB);

    leftChain.setBypassed<5>(peak1Bypass > 0.5f);
    rightChain.setBypassed<5>(peak1Bypass > 0.5f);

    leftChain.setBypassed<6>(peak2Bypass > 0.5f);
    rightChain.setBypassed<6>(peak2Bypass > 0.5f);

    leftChain.setBypassed<7>(peak3Bypass > 0.5f);
    rightChain.setBypassed<7>(peak3Bypass > 0.5f);

    const float lowGainLin = juce::Decibels::decibelsToGain(lowGdB);

    switch (lowBandModes) {
        case 0: // Off
            leftChain.setBypassed<1>(true);
            rightChain.setBypassed<1>(true);
            leftChain.setBypassed<2>(true);
            rightChain.setBypassed<2>(true);
            leftChain.setBypassed<3>(true);
            rightChain.setBypassed<3>(true);
            leftChain.setBypassed<4>(true);
            rightChain.setBypassed<4>(true);
            break;

        case 1: { // Cut
            const auto q = lowQ;
            lowBand = juce::dsp::IIR::Coefficients<float>::makeHighPass(sr, lowFreq, q);
            setCoeffs(leftChain.get<1>(), lowBand);
            setCoeffs(rightChain.get<1>(), lowBand);

            leftChain.setBypassed<1>(false);
            rightChain.setBypassed<1>(false);

            setCoeffs(leftChain.get<2>(), lowBand);
            setCoeffs(rightChain.get<2>(), lowBand);
            setCoeffs(leftChain.get<3>(), lowBand);
            setCoeffs(rightChain.get<3>(), lowBand);
            setCoeffs(leftChain.get<4>(), lowBand);
            setCoeffs(rightChain.get<4>(), lowBand);

            // Slope index N enables N + 1 cascaded 12 dB/oct stages (12/24/36/48).
            leftChain.setBypassed<2>(lowSlope < 1);
            rightChain.setBypassed<2>(lowSlope < 1);
            leftChain.setBypassed<3>(lowSlope < 2);
            rightChain.setBypassed<3>(lowSlope < 2);
            leftChain.setBypassed<4>(lowSlope < 3);
            rightChain.setBypassed<4>(lowSlope < 3);
            break;
        }

        case 2: // Shelf
            lowBand = juce::dsp::IIR::Coefficients<float>::makeLowShelf(sr, lowFreq, lowQ, lowGainLin);
            setCoeffs(leftChain.get<1>(), lowBand);
            setCoeffs(rightChain.get<1>(), lowBand);
            leftChain.setBypassed<1>(false);
            rightChain.setBypassed<1>(false);
            break;

        case 3: // Bell
            lowBand = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, lowFreq, lowQ, lowGainLin);
            setCoeffs(leftChain.get<1>(), lowBand);
            setCoeffs(rightChain.get<1>(), lowBand);
            leftChain.setBypassed<1>(false);
            rightChain.setBypassed<1>(false);
            break;
    }

    // HIGH BAND
    const float highGainLin = juce::Decibels::decibelsToGain(highGdB);

    switch (highBandModes) {
        case 0: // Off
            leftChain.setBypassed<8>(true);
            rightChain.setBypassed<8>(true);
            leftChain.setBypassed<9>(true);
            rightChain.setBypassed<9>(true);
            leftChain.setBypassed<10>(true);
            rightChain.setBypassed<10>(true);
            leftChain.setBypassed<11>(true);
            rightChain.setBypassed<11>(true);
            break;

        case 1: { // Cut
            const auto q = highQ;
            highBand = juce::dsp::IIR::Coefficients<float>::makeLowPass(sr, highFreq, q);
            setCoeffs(leftChain.get<8>(), highBand);
            setCoeffs(rightChain.get<8>(), highBand);

            leftChain.setBypassed<8>(false);
            rightChain.setBypassed<8>(false);

            setCoeffs(leftChain.get<9>(), highBand);
            setCoeffs(rightChain.get<9>(), highBand);
            setCoeffs(leftChain.get<10>(), highBand);
            setCoeffs(rightChain.get<10>(), highBand);
            setCoeffs(leftChain.get<11>(), highBand);
            setCoeffs(rightChain.get<11>(), highBand);

            // Slope index N enables N + 1 cascaded 12 dB/oct stages (12/24/36/48).
            leftChain.setBypassed<9>(highSlope < 1);
            rightChain.setBypassed<9>(highSlope < 1);
            leftChain.setBypassed<10>(highSlope < 2);
            rightChain.setBypassed<10>(highSlope < 2);
            leftChain.setBypassed<11>(highSlope < 3);
            rightChain.setBypassed<11>(highSlope < 3);
            break;
        }

        case 2: // Shelf
            highBand = juce::dsp::IIR::Coefficients<float>::makeHighShelf(sr, highFreq, highQ, highGainLin);
            setCoeffs(leftChain.get<8>(), highBand);
            setCoeffs(rightChain.get<8>(), highBand);
            leftChain.setBypassed<8>(false);
            rightChain.setBypassed<8>(false);
            break;

        case 3: // Bell
            highBand = juce::dsp::IIR::Coefficients<float>::makePeakFilter(sr, highFreq, highQ, highGainLin);
            setCoeffs(leftChain.get<8>(), highBand);
            setCoeffs(rightChain.get<8>(), highBand);
            leftChain.setBypassed<8>(false);
            rightChain.setBypassed<8>(false);
            break;
    }

    // Crystalize shelf + bell (indices 12/13) are only active while the button is on.
    const bool crystalizeActive = crystalized != 0;
    leftChain.setBypassed<12>(!crystalizeActive);
    rightChain.setBypassed<12>(!crystalizeActive);
    leftChain.setBypassed<13>(!crystalizeActive);
    rightChain.setBypassed<13>(!crystalizeActive);

    lowCutActive = (lowBandModes == 1);
    highCutActive = (highBandModes == 1);
}
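
// savePresetToFile() below writes one XML file per preset, roughly of this
// shape (the stored values are the normalised 0..1 values from getValue();
// the example numbers are only illustrative):
//
//     <Preset name="MyPreset">
//       <Param id="LowBandFreq" value="0.42"/>
//       <Param id="LowBandGain" value="0.5"/>
//       ...
//     </Preset>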
juce::String CrystalizerEQAudioProcessor::savePresetToFile() const {
    const auto nameInput = getPresetName();

    auto appData = juce::File::getSpecialLocation(juce::File::userApplicationDataDirectory);
    auto presetFolder = appData.getChildFile("AXIOM")
            .getChildFile("CrystalizerEQ")
            .getChildFile("Presets");

    presetFolder.createDirectory();

    auto file = presetFolder.getNonexistentChildFile(nameInput, ".xml");

    juce::ValueTree preset("Preset");
    preset.setProperty("name", nameInput, nullptr);

    for (auto *p: getParameters()) {
        if (p == nullptr) continue;

        if (auto *ranged = dynamic_cast<juce::RangedAudioParameter *>(p)) {
            if (ranged->getParameterID() == "MasterBypass") { continue; }

            juce::ValueTree param("Param");
            param.setProperty("id", ranged->getParameterID(), nullptr);
            param.setProperty("value", ranged->getValue(), nullptr);

            preset.addChild(param, -1, nullptr);
        }
    }

    std::unique_ptr<juce::XmlElement> xml(preset.createXml());
    xml->writeToFile(file, {});

    return file.getFileNameWithoutExtension();
}
void CrystalizerEQAudioProcessor::loadPreset(const juce::String &preset) {
    auto appData = juce::File::getSpecialLocation(juce::File::userApplicationDataDirectory);
    auto presetFolder = appData.getChildFile("AXIOM")
            .getChildFile("CrystalizerEQ")
            .getChildFile("Presets");

    auto files = presetFolder.findChildFiles(juce::File::findFiles, false, "*.xml");

    for (const auto &f: files) {
        if (f.getFileName() != preset) {
            continue;
        }

        std::unique_ptr<juce::XmlElement> xml(juce::XmlDocument::parse(f));
        if (xml == nullptr)
            return;

        for (auto *p: getParameters()) {
            if (p == nullptr) continue;

            if (auto *ranged = dynamic_cast<juce::RangedAudioParameter *>(p)) {
                for (auto *child = xml->getFirstChildElement(); child != nullptr; child = child->getNextElement()) {
                    if (child->getStringAttribute("id") == ranged->getParameterID()) {
                        const float value = (float) child->getDoubleAttribute("value");
                        ranged->beginChangeGesture();
                        ranged->setValueNotifyingHost(value);
                        ranged->endChangeGesture();
                        break;
                    }
                }
            }
        }

        presetName = f.getFileNameWithoutExtension();
    }
}
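
// Note: loadPreset() and deletePreset() compare against f.getFileName(), so the
// caller is expected to pass the file name including the ".xml" extension,
// while getPresetNamesArray() returns names without the extension.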
void CrystalizerEQAudioProcessor::deletePreset(const juce::String &preset) const {
    auto appData = juce::File::getSpecialLocation(juce::File::userApplicationDataDirectory);
    auto presetFolder = appData.getChildFile("AXIOM")
            .getChildFile("CrystalizerEQ")
            .getChildFile("Presets");

    juce::Array<juce::File> files = presetFolder.findChildFiles(juce::File::findFiles, false, "*.xml");

    for (const auto &f: files) {
        if (f.getFileName() != preset) {
            continue;
        }
        f.deleteFile();
    }
}
void CrystalizerEQAudioProcessor::resetAllParameters() {
    for (auto *p: getParameters()) {
        if (p == nullptr) continue;

        if (auto *ranged = dynamic_cast<juce::RangedAudioParameter *>(p)) {
            const float def = ranged->getDefaultValue();
            ranged->beginChangeGesture();
            ranged->setValueNotifyingHost(def);
            ranged->endChangeGesture();
        }
    }
    presetName = "Init";
}

void CrystalizerEQAudioProcessor::parameterChanged(const juce::String &id, float v) {
}
juce::StringArray CrystalizerEQAudioProcessor::getPresetNamesArray() const {
    juce::StringArray presetNames = {"Init"};
    auto appData = juce::File::getSpecialLocation(juce::File::userApplicationDataDirectory);
    auto presetFolder = appData.getChildFile("AXIOM")
            .getChildFile("CrystalizerEQ")
            .getChildFile("Presets");

    juce::Array<juce::File> files = presetFolder.findChildFiles(juce::File::findFiles, false, "*.xml");

    for (const auto &f: files)
        presetNames.add(f.getFileNameWithoutExtension());

    return presetNames;
}
void CrystalizerEQAudioProcessor::releaseResources() {
    // When playback stops, you can use this as an opportunity to free up any
    // spare memory, etc.
}

#ifndef JucePlugin_PreferredChannelConfigurations
bool CrystalizerEQAudioProcessor::isBusesLayoutSupported(const BusesLayout &layouts) const {
#if JucePlugin_IsMidiEffect
    juce::ignoreUnused(layouts);
    return true;
#else
    if (layouts.getMainOutputChannelSet() != juce::AudioChannelSet::mono()
        && layouts.getMainOutputChannelSet() != juce::AudioChannelSet::stereo())
        return false;

#if ! JucePlugin_IsSynth
    if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
        return false;
#endif

    return true;
#endif
}
#endif
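
// Note: processBlock() below indexes channels 0 and 1 directly
// (getSingleChannelBlock(0/1)), so in practice the plugin assumes a stereo
// layout even though isBusesLayoutSupported() also accepts mono.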
void CrystalizerEQAudioProcessor::processBlock(juce::AudioBuffer<float> &buffer, juce::MidiBuffer &midiMessages) {
    juce::ignoreUnused(midiMessages);
    juce::ScopedNoDenormals noDenormals;
    auto totalNumInputChannels = getTotalNumInputChannels();
    auto totalNumOutputChannels = getTotalNumOutputChannels();

    for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i)
        buffer.clear(i, 0, buffer.getNumSamples());

    juce::AudioBuffer<float> lowBuf, highBuf;

    const bool masterBypassed = apvts.getRawParameterValue("MasterBypass")->load() > 0.5f;
    if (masterBypassed)
        return;

    const auto crystalized = static_cast<int>(apvts.getRawParameterValue("CrystalizeButton")->load());

    if (crystalized) {
        lowBuf.makeCopyOf(buffer, true);
        highBuf.makeCopyOf(buffer, true);
        buffer.makeCopyOf(processMultiBand(lowBuf, highBuf), true);
    }

    updateFilters();

    juce::dsp::AudioBlock<float> block(buffer);

    auto leftBlock = block.getSingleChannelBlock(0);
    auto rightBlock = block.getSingleChannelBlock(1);

    juce::dsp::ProcessContextReplacing<float> leftCtx(leftBlock);
    juce::dsp::ProcessContextReplacing<float> rightCtx(rightBlock);

    leftChain.process(leftCtx);
    rightChain.process(rightCtx);

    audioFIFO.loadSamplesToFIFO(buffer);
}
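
// processMultiBand(): splits the signal into two bands with Linkwitz-Riley
// low/high-pass filters at a fixed 10 kHz crossover, adds level-dependent
// white noise and tanh saturation to the high band, then mixes the high band
// back onto the low band with +3 dB of gain.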
juce::AudioBuffer<float> CrystalizerEQAudioProcessor::processMultiBand(juce::AudioBuffer<float> &lowBuf,
                                                                       juce::AudioBuffer<float> &highBuf) {
    const auto sr = getSampleRate();
    // Crossover frequency for multiband processing
    float fc = 10000.0f;
    fc = juce::jlimit(20.0f, 0.49f * (float) sr, fc);

    auto lp = Coeff::makeLowPass(sr, fc, 0.7071f);
    auto hp = Coeff::makeHighPass(sr, fc, 0.7071f);

    mbLowpassL.setType(juce::dsp::LinkwitzRileyFilterType::lowpass);
    mbLowpassL.setCutoffFrequency(fc);
    mbLowpassR.setType(juce::dsp::LinkwitzRileyFilterType::lowpass);
    mbLowpassR.setCutoffFrequency(fc);

    mbHighpassL.setType(juce::dsp::LinkwitzRileyFilterType::highpass);
    mbHighpassL.setCutoffFrequency(fc);
    mbHighpassR.setType(juce::dsp::LinkwitzRileyFilterType::highpass);
    mbHighpassR.setCutoffFrequency(fc);

    {
        // LOW-BAND PROCESSING
        juce::dsp::AudioBlock<float> lowBlock(lowBuf);
        auto leftLowBlock = lowBlock.getSingleChannelBlock(0);
        auto rightLowBlock = lowBlock.getSingleChannelBlock(1);
        juce::dsp::ProcessContextReplacing<float> leftLowCtx(leftLowBlock);
        juce::dsp::ProcessContextReplacing<float> rightLowCtx(rightLowBlock);

        mbLowpassL.process(leftLowCtx);
        mbLowpassR.process(rightLowCtx);
    }

    {
        // HIGH-BAND PROCESSING
        juce::dsp::AudioBlock<float> highBlock(highBuf);
        auto leftHighBlock = highBlock.getSingleChannelBlock(0);
        auto rightHighBlock = highBlock.getSingleChannelBlock(1);
        juce::dsp::ProcessContextReplacing<float> leftHighCtx(leftHighBlock);
        juce::dsp::ProcessContextReplacing<float> rightHighCtx(rightHighBlock);

        mbHighpassL.process(leftHighCtx);
        mbHighpassR.process(rightHighCtx);

        // WHITE NOISE ON HIGH-BAND
        const int numSamples = highBuf.getNumSamples();
        const int numChannels = highBuf.getNumChannels();

        const float rmsL = highBuf.getRMSLevel(0, 0, numSamples);
        const float rmsR = highBuf.getRMSLevel(1, 0, numSamples);
        const float rms = (rmsL + rmsR) * 0.5f;

        const float dyn = juce::jlimit(0.0f, 1.0f, rms * 2.0f);
        const float gain = dyn * juce::Decibels::decibelsToGain(-3.0f);

        for (int ch = 0; ch < numChannels; ++ch) {
            auto *write = highBuf.getWritePointer(ch);
            auto &rng = (ch == 0 ? noiseRandL : noiseRandR);

            for (int n = 0; n < numSamples; ++n) {
                // rng.nextFloat() ∈ [0,1) → white noise in [-1,1)
                const float white = 2.0f * rng.nextFloat() - 1.0f;
                write[n] += white * gain; // added on top of the high-band signal
            }
        }

        saturatorL.process(leftHighCtx);
        saturatorR.process(rightHighCtx);
    }

    juce::AudioBuffer<float> out;
    out.makeCopyOf(lowBuf, true);

    const int numCh = out.getNumChannels();
    const int numSm = out.getNumSamples();
    const float highBandGain = juce::Decibels::decibelsToGain(3.0f);
    for (int ch = 0; ch < numCh; ++ch)
        out.addFrom(ch, 0, highBuf, ch, 0, numSm, highBandGain);

    return out;
}
void CrystalizerEQAudioProcessor::setPresetName(const juce::String &name) {
    presetName = name;
}

//==============================================================================
bool CrystalizerEQAudioProcessor::hasEditor() const {
    return true; // (change this to false if you choose to not supply an editor)
}

juce::AudioProcessorEditor *CrystalizerEQAudioProcessor::createEditor() {
    return new CrystalizerEQAudioProcessorEditor(*this);
}

//==============================================================================
void CrystalizerEQAudioProcessor::getStateInformation(juce::MemoryBlock &destData) {
    auto state = apvts.copyState();
    std::unique_ptr<juce::XmlElement> xml(state.createXml());
    xml->setAttribute("currentPreset", presetName);

    copyXmlToBinary(*xml, destData);
}

void CrystalizerEQAudioProcessor::setStateInformation(const void *data, int sizeInBytes) {
    std::unique_ptr<juce::XmlElement> xmlState(getXmlFromBinary(data, sizeInBytes));

    if (xmlState != nullptr) {
        if (xmlState->hasTagName(apvts.state.getType())) {
            apvts.replaceState(juce::ValueTree::fromXml(*xmlState));

            presetName = xmlState->getStringAttribute("currentPreset", "Init");
        }
    }
}

//==============================================================================
// This creates new instances of the plugin..
juce::AudioProcessor *JUCE_CALLTYPE createPluginFilter() {
    return new CrystalizerEQAudioProcessor();
}
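
// AudioFIFO collects samples from the audio thread (channel 0 only) under a
// SpinLock; the editor is assumed to drain them via sendSamplesToEditor() for
// its analyser/waveform display.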
void AudioFIFO::loadSamplesToFIFO(const juce::AudioBuffer<float> &samples) {
    const int numSamples = samples.getNumSamples();

    const float *channelData = samples.getReadPointer(0);

    const juce::SpinLock::ScopedLockType guard(lock);
    sampleStack.ensureStorageAllocated(sampleStack.size() + numSamples);

    for (int i = 0; i < numSamples; ++i) {
        sampleStack.add(channelData[i]);
    }
}

juce::Array<float> AudioFIFO::sendSamplesToEditor() {
    const juce::SpinLock::ScopedLockType guard(lock);
    juce::Array<float> copiedSamples = sampleStack;
    sampleStack.clear();

    return copiedSamples;
}