Commit

input_size
olilarkin committed May 4, 2024
1 parent 23171a1 commit a4659f8
Showing 5 changed files with 24 additions and 24 deletions.
6 changes: 3 additions & 3 deletions NAM/get_dsp.cpp
@@ -162,9 +162,9 @@ std::unique_ptr<DSP> GetDSP(dspData& conf)
   else if (architecture == "LSTM")
   {
     const int numLayers = config["num_layers"];
-    const int input_size = config["input_size"];
-    const int hidden_size = config["hidden_size"];
-    out = std::make_unique<lstm::LSTM>(numLayers, input_size, hidden_size, weights, expectedSampleRate);
+    const int inputSize = config["input_size"];
+    const int hiddenSize = config["hidden_size"];
+    out = std::make_unique<lstm::LSTM>(numLayers, inputSize, hiddenSize, weights, expectedSampleRate);
   }
   else if (architecture == "WaveNet")
   {
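For context: GetDSP dispatches on the "architecture" field of the model's JSON config, and the LSTM branch above reads three integers from it. A minimal sketch of that lookup, assuming nlohmann::json (the include path and field values here are illustrative assumptions, not taken from this commit):

// Hedged sketch: how the "LSTM" branch above pulls its hyperparameters out of
// a model config. Assumes nlohmann::json; the field values are made up.
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
  nlohmann::json config = {
    {"architecture", "LSTM"}, {"num_layers", 1}, {"input_size", 1}, {"hidden_size", 16}};
  const int numLayers = config["num_layers"];
  const int inputSize = config["input_size"];
  const int hiddenSize = config["hidden_size"];
  std::cout << numLayers << " layer(s): " << inputSize << " -> " << hiddenSize << "\n";
  return 0;
}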
18 changes: 9 additions & 9 deletions NAM/lstm.cpp
@@ -5,12 +5,12 @@
 #include "dsp.h"
 #include "lstm.h"
 
-nam::lstm::LSTMCell::LSTMCell(const int input_size, const int hidden_size, weights_it& weights)
+nam::lstm::LSTMCell::LSTMCell(const int inputSize, const int hidden_size, weights_it& weights)
 {
   // Resize arrays
-  this->_w.resize(4 * hidden_size, input_size + hidden_size);
+  this->_w.resize(4 * hidden_size, inputSize + hidden_size);
   this->_b.resize(4 * hidden_size);
-  this->_xh.resize(input_size + hidden_size);
+  this->_xh.resize(inputSize + hidden_size);
   this->_ifgo.resize(4 * hidden_size);
   this->_c.resize(hidden_size);
 
@@ -20,7 +20,7 @@ nam::lstm::LSTMCell::LSTMCell(const int input_size, const int hidden_size, weights_it& weights)
       this->_w(i, j) = *(weights++);
   for (int i = 0; i < this->_b.size(); i++)
     this->_b[i] = *(weights++);
-  const int h_offset = input_size;
+  const int h_offset = inputSize;
   for (int i = 0; i < hidden_size; i++)
     this->_xh[i + h_offset] = *(weights++);
   for (int i = 0; i < hidden_size; i++)
@@ -30,17 +30,17 @@ nam::lstm::LSTMCell::LSTMCell(const int input_size, const int hidden_size, weights_it& weights)
 void nam::lstm::LSTMCell::Process(const Eigen::VectorXf& x)
 {
   const long hidden_size = this->_get_hidden_size();
-  const long input_size = this->_get_input_size();
+  const long inputSize = this->_get_input_size();
   // Assign inputs
-  this->_xh(Eigen::seq(0, input_size - 1)) = x;
+  this->_xh(Eigen::seq(0, inputSize - 1)) = x;
   // The matmul
   this->_ifgo = this->_w * this->_xh + this->_b;
   // Elementwise updates (apply nonlinearities here)
   const long i_offset = 0;
   const long f_offset = hidden_size;
   const long g_offset = 2 * hidden_size;
   const long o_offset = 3 * hidden_size;
-  const long h_offset = input_size;
+  const long h_offset = inputSize;
 
   if (activations::Activation::sUsingFastTanh)
   {
@@ -64,14 +64,14 @@ void nam::lstm::LSTMCell::Process(const Eigen::VectorXf& x)
   }
 }
 
-nam::lstm::LSTM::LSTM(const int numLayers, const int input_size, const int hidden_size, const std::vector<float>& weights,
+nam::lstm::LSTM::LSTM(const int numLayers, const int inputSize, const int hidden_size, const std::vector<float>& weights,
                       const double expectedSampleRate)
 : DSP(expectedSampleRate)
 {
   this->mInput.resize(1);
   auto it = weights.begin();
   for (int i = 0; i < numLayers; i++)
-    this->mLayers.push_back(LSTMCell(i == 0 ? input_size : hidden_size, hidden_size, it));
+    this->mLayers.push_back(LSTMCell(i == 0 ? inputSize : hidden_size, hidden_size, it));
   this->mHeadWeight.resize(hidden_size);
   for (int i = 0; i < hidden_size; i++)
     this->mHeadWeight[i] = *(it++);
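A note on what Process is doing, since the offsets are easy to misread: _xh holds the concatenated [x; h] vector, a single matmul (_w * _xh + _b) produces all four gate pre-activations stacked as i, f, g, o blocks of hidden_size each, and the elementwise pass applies the nonlinearities. A standalone sketch of the same update in plain Eigen (illustrative names and random weights, not the library's API):

// Hedged sketch of one fused LSTM step, mirroring LSTMCell::Process above.
// Names and values are illustrative; only the gate layout (i, f, g, o blocks
// stacked along the rows of W) and the [x; h] concatenation come from the diff.
#include <cmath>
#include <iostream>
#include <Eigen/Dense>

int main()
{
  const int inputSize = 1, hiddenSize = 2;
  Eigen::MatrixXf W = Eigen::MatrixXf::Random(4 * hiddenSize, inputSize + hiddenSize);
  Eigen::VectorXf b = Eigen::VectorXf::Random(4 * hiddenSize);
  Eigen::VectorXf xh = Eigen::VectorXf::Zero(inputSize + hiddenSize); // [x; h]
  Eigen::VectorXf c = Eigen::VectorXf::Zero(hiddenSize);              // cell state

  xh(0) = 0.5f;                      // assign the input sample
  Eigen::VectorXf ifgo = W * xh + b; // one matmul for all four gates

  auto sigmoid = [](float v) { return 1.0f / (1.0f + std::exp(-v)); };
  for (int k = 0; k < hiddenSize; k++)
  {
    const float i = sigmoid(ifgo(k));                    // input gate
    const float f = sigmoid(ifgo(hiddenSize + k));       // forget gate
    const float g = std::tanh(ifgo(2 * hiddenSize + k)); // candidate
    const float o = sigmoid(ifgo(3 * hiddenSize + k));   // output gate
    c(k) = f * c(k) + i * g;                             // cell update
    xh(inputSize + k) = o * std::tanh(c(k));             // new h, written back into [x; h]
  }
  std::cout << "h = " << xh.tail(hiddenSize).transpose() << std::endl;
  return 0;
}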
4 changes: 2 additions & 2 deletions NAM/lstm.h
@@ -22,7 +22,7 @@ namespace lstm
 class LSTMCell
 {
 public:
-  LSTMCell(const int input_size, const int hidden_size, weights_it& weights);
+  LSTMCell(const int inputSize, const int hidden_size, weights_it& weights);
   Eigen::VectorXf get_hidden_state() const { return this->_xh(Eigen::placeholders::lastN(this->_get_hidden_size())); };
   void Process(const Eigen::VectorXf& x);
 
@@ -50,7 +50,7 @@ class LSTMCell
 class LSTM : public DSP
 {
 public:
-  LSTM(const int numLayers, const int input_size, const int hidden_size, const std::vector<float>& weights,
+  LSTM(const int numLayers, const int inputSize, const int hidden_size, const std::vector<float>& weights,
        const double expectedSampleRate = -1.0);
   ~LSTM() = default;
 
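The weights_it iterator in these signatures walks one flat weight buffer; from the lstm.cpp constructor above, each cell consumes its _w matrix row by row, then _b, then an initial hidden state (the final loop is cut off in the diff, but it presumably fills the initial cell state _c). A small bookkeeping sketch under that assumption:

// Hedged sketch of the flat-buffer bookkeeping behind weights_it. The count
// formula is derived from the resize calls in LSTMCell's constructor; the
// trailing "initial c" term is an assumption, since that loop is truncated
// in the diff above.
#include <cstddef>
#include <iostream>

std::size_t lstmCellWeightCount(int inputSize, int hiddenSize)
{
  return std::size_t(4 * hiddenSize) * (inputSize + hiddenSize) // _w (row by row)
         + 4 * hiddenSize                                       // _b
         + hiddenSize                                           // initial hidden state
         + hiddenSize;                                          // initial cell state (assumed)
}

int main()
{
  // The first layer maps inputSize -> hiddenSize; later layers are hidden -> hidden,
  // matching the `i == 0 ? inputSize : hidden_size` ternary in LSTM's constructor.
  const int numLayers = 2, inputSize = 1, hiddenSize = 16;
  std::size_t total = 0;
  for (int i = 0; i < numLayers; i++)
    total += lstmCellWeightCount(i == 0 ? inputSize : hiddenSize, hiddenSize);
  std::cout << "cell weights consumed: " << total << std::endl; // head weights follow
  return 0;
}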
12 changes: 6 additions & 6 deletions NAM/wavenet.cpp
@@ -59,10 +59,10 @@ void nam::wavenet::_Layer::set_num_frames_(const long numFrames)
 
 #define LAYER_ARRAY_BUFFER_SIZE 65536
 
-nam::wavenet::_LayerArray::_LayerArray(const int input_size, const int condition_size, const int head_size,
+nam::wavenet::_LayerArray::_LayerArray(const int inputSize, const int condition_size, const int head_size,
                                        const int channels, const int kernelSize, const std::vector<int>& dilations,
                                        const std::string activation, const bool gated, const bool head_bias)
-: _rechannel(input_size, channels, false)
+: _rechannel(inputSize, channels, false)
 , _head_rechannel(channels, head_size, head_bias)
 {
   for (size_t i = 0; i < dilations.size(); i++)
@@ -180,13 +180,13 @@ void nam::wavenet::_LayerArray::RewindBuffers()
 
 // Head =======================================================================
 
-nam::wavenet::Head::Head(const int input_size, const int numLayers, const int channels, const std::string activation)
+nam::wavenet::Head::Head(const int inputSize, const int numLayers, const int channels, const std::string activation)
 : _channels(channels)
-, _head(numLayers > 0 ? channels : input_size, 1, true)
+, _head(numLayers > 0 ? channels : inputSize, 1, true)
 , _activation(activations::Activation::GetActivation(activation))
 {
   assert(numLayers > 0);
-  int dx = input_size;
+  int dx = inputSize;
   for (int i = 0; i < numLayers; i++)
   {
     this->_layers.push_back(Conv1x1(dx, i == numLayers - 1 ? 1 : channels, true));
@@ -252,7 +252,7 @@ nam::wavenet::WaveNet::WaveNet(const std::vector<nam::wavenet::LayerArrayParams>
   for (size_t i = 0; i < layer_array_params.size(); i++)
   {
     this->_layer_arrays.push_back(nam::wavenet::_LayerArray(
-      layer_array_params[i].input_size, layer_array_params[i].condition_size, layer_array_params[i].head_size,
+      layer_array_params[i].inputSize, layer_array_params[i].condition_size, layer_array_params[i].head_size,
       layer_array_params[i].channels, layer_array_params[i].kernelSize, layer_array_params[i].dilations,
       layer_array_params[i].activation, layer_array_params[i].gated, layer_array_params[i].head_bias));
     this->_layer_array_outputs.push_back(Eigen::MatrixXf(layer_array_params[i].channels, 0));
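One detail worth spelling out in Head's constructor: dx tracks the input width of the next Conv1x1, starting at inputSize, with every layer except the last outputting `channels` and the final layer narrowing to a single channel. The diff cuts off right after the push_back, so the dx update in the sketch below is an assumption about the elided loop body:

// Hedged shape walk of Head's Conv1x1 stack. The `dx = channels` step is an
// assumption about the loop body elided by the diff; everything else follows
// the constructor above. Values are illustrative.
#include <iostream>

int main()
{
  const int inputSize = 8, numLayers = 3, channels = 16;
  int dx = inputSize;
  for (int i = 0; i < numLayers; i++)
  {
    const int outChannels = (i == numLayers - 1) ? 1 : channels; // last layer narrows to 1
    std::cout << "Conv1x1 layer " << i << ": " << dx << " -> " << outChannels << "\n";
    dx = channels; // assumed: next layer reads the previous layer's output width
  }
  return 0;
}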
8 changes: 4 additions & 4 deletions NAM/wavenet.h
@@ -60,7 +60,7 @@ class LayerArrayParams
   LayerArrayParams(const int input_size_, const int condition_size_, const int head_size_, const int channels_,
                    const int kernel_size_, const std::vector<int>& dilations_, const std::string activation_,
                    const bool gated_, const bool head_bias_)
-  : input_size(input_size_)
+  : inputSize(input_size_)
   , condition_size(condition_size_)
   , head_size(head_size_)
   , channels(channels_)
@@ -73,7 +73,7 @@ class LayerArrayParams
       this->dilations.push_back(dilations_[i]);
   };
 
-  const int input_size;
+  const int inputSize;
   const int condition_size;
   const int head_size;
   const int channels;
@@ -88,7 +88,7 @@ class LayerArrayParams
 class _LayerArray
 {
 public:
-  _LayerArray(const int input_size, const int condition_size, const int head_size, const int channels,
+  _LayerArray(const int inputSize, const int condition_size, const int head_size, const int channels,
               const int kernelSize, const std::vector<int>& dilations, const std::string activation, const bool gated,
               const bool head_bias);
 
@@ -143,7 +143,7 @@ class _LayerArray
 class Head
 {
 public:
-  Head(const int input_size, const int numLayers, const int channels, const std::string activation);
+  Head(const int inputSize, const int numLayers, const int channels, const std::string activation);
   void SetWeights(weights_it& weights);
   // NOTE: the head transforms the provided input by applying a nonlinearity
   // to it in-place!
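Note the asymmetry this commit leaves behind: the stored member is now inputSize, while the constructor parameters keep their trailing-underscore snake_case names. A hedged construction sketch (the include path, dilation scheme, and all numeric values are illustrative, not taken from this diff):

// Hedged construction sketch. Assumes this file is built against wavenet.h;
// the include path, dilation scheme, and numeric values are illustrative.
#include <string>
#include <vector>
#include "NAM/wavenet.h"

nam::wavenet::LayerArrayParams makeExampleParams()
{
  std::vector<int> dilations;
  for (int d = 1; d <= 512; d *= 2) // 1, 2, 4, ..., 512: a typical doubling scheme
    dilations.push_back(d);
  // The constructor parameters keep their snake_case_ names; only the stored
  // member was renamed to inputSize in this commit.
  return nam::wavenet::LayerArrayParams(/* input_size_ */ 1, /* condition_size_ */ 1,
                                        /* head_size_ */ 8, /* channels_ */ 16,
                                        /* kernel_size_ */ 3, dilations,
                                        /* activation_ */ "Tanh",
                                        /* gated_ */ false, /* head_bias_ */ false);
}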
