Skip to content

Commit

Permalink
simplify vector load from json (#105)
Browse files Browse the repository at this point in the history
This reduces the number of memory allocations during model load.
  • Loading branch information
shaforostoff authored Jul 21, 2024
1 parent 76cc2f2 commit 028e648
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 17 deletions.
18 changes: 5 additions & 13 deletions NAM/get_dsp.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,13 +68,10 @@ void verify_config_version(const std::string versionStr)

std::vector<float> GetWeights(nlohmann::json const& j, const std::filesystem::path config_path)
{
if (j.find("weights") != j.end())
auto it = j.find("weights");
if (it != j.end())
{
auto weight_list = j["weights"];
std::vector<float> weights;
for (auto it = weight_list.begin(); it != weight_list.end(); ++it)
weights.push_back(*it);
return weights;
return *it;
}
else
throw std::runtime_error("Corrupted model file is missing weights.");
Expand Down Expand Up @@ -153,9 +150,7 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
{
const int channels = config["channels"];
const bool batchnorm = config["batchnorm"];
std::vector<int> dilations;
for (size_t i = 0; i < config["dilations"].size(); i++)
dilations.push_back(config["dilations"][i]);
std::vector<int> dilations = config["dilations"];
const std::string activation = config["activation"];
out = std::make_unique<convnet::ConvNet>(channels, dilations, batchnorm, activation, weights, expectedSampleRate);
}
Expand All @@ -172,12 +167,9 @@ std::unique_ptr<DSP> get_dsp(dspData& conf)
for (size_t i = 0; i < config["layers"].size(); i++)
{
nlohmann::json layer_config = config["layers"][i];
std::vector<int> dilations;
for (size_t j = 0; j < layer_config["dilations"].size(); j++)
dilations.push_back(layer_config["dilations"][j]);
layer_array_params.push_back(
wavenet::LayerArrayParams(layer_config["input_size"], layer_config["condition_size"], layer_config["head_size"],
layer_config["channels"], layer_config["kernel_size"], dilations,
layer_config["channels"], layer_config["kernel_size"], layer_config["dilations"],
layer_config["activation"], layer_config["gated"], layer_config["head_bias"]));
}
const bool with_head = config["head"] == NULL;
Expand Down
7 changes: 3 additions & 4 deletions NAM/wavenet.h
Original file line number Diff line number Diff line change
Expand Up @@ -58,20 +58,19 @@ class LayerArrayParams
{
public:
LayerArrayParams(const int input_size_, const int condition_size_, const int head_size_, const int channels_,
const int kernel_size_, const std::vector<int>& dilations_, const std::string activation_,
const int kernel_size_, const std::vector<int>&& dilations_, const std::string activation_,
const bool gated_, const bool head_bias_)
: input_size(input_size_)
, condition_size(condition_size_)
, head_size(head_size_)
, channels(channels_)
, kernel_size(kernel_size_)
, dilations(std::move(dilations_))
, activation(activation_)
, gated(gated_)
, head_bias(head_bias_)
{
for (size_t i = 0; i < dilations_.size(); i++)
this->dilations.push_back(dilations_[i]);
};
}

const int input_size;
const int condition_size;
Expand Down

0 comments on commit 028e648

Please sign in to comment.