
Commit 9836f6e

Fixes suggested by clang-tidy

- fix instances of std::ios::fmtflags(0), which is undefined (apparently)
- use emplace_back() when appropriate to avoid unnecessary copies

1 parent: ae99b67

16 files changed, +42 −39 lines
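As background for the emplace_back() changes in the files below: push_back(T(...)) builds a temporary object and then copies or moves it into the container, whereas emplace_back(...) forwards its arguments to the element's constructor so the element is built in place. A minimal sketch, using a stand-in feature_state type with a two-argument constructor (not the repository's actual class):

    #include <vector>

    // Illustrative stand-in for a feature-state record with a two-argument constructor.
    struct feature_state {
      bool available;
      bool enabled;
      feature_state(bool a, bool e) : available(a), enabled(e) {}
    };

    int main() {
      std::vector<feature_state> feature_states;
      feature_states.reserve(2);
      feature_states.push_back(feature_state(true, false)); // temporary constructed, then moved/copied in
      feature_states.emplace_back(true, false);             // constructed directly inside the vector
      return 0;
    }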

src/colvar.cpp

Lines changed: 6 additions & 6 deletions
@@ -1280,7 +1280,7 @@ int colvar::init_dependencies() {
   // Initialize feature_states for each instance
   feature_states.reserve(f_cv_ntot);
   for (i = feature_states.size(); i < f_cv_ntot; i++) {
-    feature_states.push_back(feature_state(true, false));
+    feature_states.emplace_back(true, false);
     // Most features are available, so we set them so
     // and list exceptions below
   }
@@ -2043,8 +2043,8 @@ void colvar::communicate_forces()
   func_grads.reserve(cvcs.size());
   for (i = 0; i < cvcs.size(); i++) {
     if (!cvcs[i]->is_enabled()) continue;
-    func_grads.push_back(cvm::matrix2d<cvm::real> (x.size(),
-                                                   cvcs[i]->value().size()));
+    func_grads.emplace_back(x.size(),
+                            cvcs[i]->value().size());
   }
   int res = cvm::proxy->run_colvar_gradient_callback(scripted_function, sorted_cvc_values, func_grads);
 
@@ -2803,7 +2803,7 @@ int colvar::calc_acf()
   case acf_vel:
     // allocate space for the velocities history
     for (i = 0; i < acf_stride; i++) {
-      acf_v_history.push_back(std::list<colvarvalue>());
+      acf_v_history.emplace_back();
     }
     acf_v_history_p = acf_v_history.begin();
     break;
@@ -2812,7 +2812,7 @@ int colvar::calc_acf()
   case acf_p2coor:
     // allocate space for the coordinates history
     for (i = 0; i < acf_stride; i++) {
-      acf_x_history.push_back(std::list<colvarvalue>());
+      acf_x_history.emplace_back();
     }
     acf_x_history_p = acf_x_history.begin();
     break;
@@ -3003,7 +3003,7 @@ int colvar::calc_runave()
 
     acf_nframes = 0;
 
-    x_history.push_back(std::list<colvarvalue>());
+    x_history.emplace_back();
     x_history_p = x_history.begin();
 
   } else {

src/colvar_neuralnetworkcompute.cpp

Lines changed: 4 additions & 4 deletions
@@ -246,15 +246,15 @@ bool neuralNetworkCompute::addDenseLayer(const denseLayer& layer) {
   if (m_dense_layers.empty()) {
     // add layer to this ann directly if m_dense_layers is empty
     m_dense_layers.push_back(layer);
-    m_layers_output.push_back(std::vector<double>(layer.getOutputSize()));
-    m_grads_tmp.push_back(std::vector<std::vector<double>>(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0)));
+    m_layers_output.emplace_back(layer.getOutputSize());
+    m_grads_tmp.emplace_back(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0));
     return true;
   } else {
     // otherwise, we need to check if the output of last layer in m_dense_layers matches the input of layer to be added
     if (m_dense_layers.back().getOutputSize() == layer.getInputSize()) {
       m_dense_layers.push_back(layer);
-      m_layers_output.push_back(std::vector<double>(layer.getOutputSize()));
-      m_grads_tmp.push_back(std::vector<std::vector<double>>(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0)));
+      m_layers_output.emplace_back(layer.getOutputSize());
+      m_grads_tmp.emplace_back(layer.getOutputSize(), std::vector<double>(layer.getInputSize(), 0));
       return true;
     } else {
       return false;
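A side note on the two-argument emplace_back() calls above: because the arguments are forwarded to the element's constructor, a nested std::vector can be sized and filled in place. A minimal sketch with illustrative names (the variables and sizes are placeholders, not the class members):

    #include <vector>

    int main() {
      std::vector<std::vector<double>> layers_output;
      std::vector<std::vector<std::vector<double>>> grads_tmp;
      const std::size_t output_size = 8, input_size = 4;

      // Old form: build a temporary vector, then move it into the container.
      layers_output.push_back(std::vector<double>(output_size));

      // New form: forward the size to std::vector's constructor; the element
      // is constructed in place, avoiding the temporary.
      layers_output.emplace_back(output_size);

      // Same idea for the nested case: (count, value) are forwarded to the
      // std::vector<std::vector<double>> constructor.
      grads_tmp.emplace_back(output_size, std::vector<double>(input_size, 0.0));
      return 0;
    }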

src/colvaratoms.cpp

Lines changed: 1 addition & 1 deletion
@@ -213,7 +213,7 @@ int cvm::atom_group::init_dependencies() {
   // default as unavailable, not enabled
   feature_states.reserve(f_ag_ntot);
   for (i = feature_states.size(); i < colvardeps::f_ag_ntot; i++) {
-    feature_states.push_back(feature_state(false, false));
+    feature_states.emplace_back(false, false);
   }
 
   // Features that are implemented (or not) by all atom groups

src/colvarbias.cpp

Lines changed: 2 additions & 2 deletions
@@ -248,7 +248,7 @@ int colvarbias::init_dependencies() {
   // Initialize feature_states for each instance
   feature_states.reserve(f_cvb_ntot);
   for (i = feature_states.size(); i < f_cvb_ntot; i++) {
-    feature_states.push_back(feature_state(true, false));
+    feature_states.emplace_back(true, false);
     // Most features are available, so we set them so
     // and list exceptions below
   }
@@ -352,7 +352,7 @@ int colvarbias::add_colvar(std::string const &cv_name)
   // although possibly not at all timesteps
   add_child(cv);
 
-  colvar_forces.push_back(colvarvalue());
+  colvar_forces.emplace_back();
   colvar_forces.back().type(cv->value()); // make sure each force is initialized to zero
   colvar_forces.back().is_derivative(); // colvar constraints are not applied to the force
   colvar_forces.back().reset();

src/colvarbias_abf.cpp

Lines changed: 2 additions & 2 deletions
@@ -925,7 +925,7 @@ template <typename OST> OST & colvarbias_abf::write_state_data_template_(OST &os)
 {
   auto flags = os.flags();
 
-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield); // default floating-point format
+  os.unsetf(std::ios::floatfield); // default floating-point format
 
   write_state_data_key(os, "samples");
   samples->write_raw(os, 8);
@@ -941,7 +941,7 @@ template <typename OST> OST & colvarbias_abf::write_state_data_template_(OST &os)
   }
 
   if (b_CZAR_estimator) {
-    os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield); // default floating-point format
+    os.unsetf(std::ios::floatfield); // default floating-point format
     write_state_data_key(os, "z_samples");
     z_samples->write_raw(os, 8);
     write_state_data_key(os, "z_gradient");

src/colvarbias_histogram.cpp

Lines changed: 1 addition & 1 deletion
@@ -216,7 +216,7 @@ cvm::memory_stream & colvarbias_histogram::read_state_data(cvm::memory_stream& is)
 std::ostream & colvarbias_histogram::write_state_data(std::ostream& os)
 {
   std::ios::fmtflags flags(os.flags());
-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield);
+  os.unsetf(std::ios::floatfield);
   write_state_data_key(os, "grid");
   grid->write_raw(os, 8);
   os.flags(flags);

src/colvarbias_histogram_reweight_amd.cpp

Lines changed: 1 addition & 1 deletion
@@ -318,7 +318,7 @@ void colvarbias_reweightaMD::compute_cumulant_expansion_factor(
 template <typename OST> OST & colvarbias_reweightaMD::write_state_data_template_(OST& os)
 {
   std::ios::fmtflags flags(os.flags());
-  os.setf(std::ios::fmtflags(std::ios::dec), std::ios::floatfield);
+  os.unsetf(std::ios::floatfield);
   write_state_data_key(os, "grid");
   grid->write_raw(os, 8);
   write_state_data_key(os, "grid_count");

src/colvarbias_meta.cpp

Lines changed: 1 addition & 1 deletion
@@ -1705,7 +1705,7 @@ template <typename IST> IST &colvarbias_meta::read_hill_template_(IST &is)
   }
 
   hill_iter const hills_end = hills.end();
-  hills.push_back(hill(h_it, h_weight, h_centers, h_sigmas, h_replica));
+  hills.emplace_back(h_it, h_weight, h_centers, h_sigmas, h_replica);
   if (new_hills_begin == hills_end) {
     // if new_hills_begin is unset, set it for the first time
     new_hills_begin = hills.end();

src/colvarcomp.cpp

Lines changed: 1 addition & 1 deletion
@@ -308,7 +308,7 @@ int colvar::cvc::init_dependencies() {
   feature_states.reserve(f_cvc_ntot);
   for (i = feature_states.size(); i < colvardeps::f_cvc_ntot; i++) {
     bool avail = is_dynamic(i) ? false : true;
-    feature_states.push_back(feature_state(avail, false));
+    feature_states.emplace_back(avail, false);
   }
 
   // Features that are implemented by all cvcs by default

src/colvarcomp_neuralnetwork.cpp

Lines changed: 2 additions & 2 deletions
@@ -77,14 +77,14 @@ int colvar::neuralNetwork::init(std::string const &conf)
       return cvm::error("Unknown activation function name: \"" + function_name + "\".\n",
                         COLVARS_INPUT_ERROR);
     }
-    activation_functions.push_back(std::make_pair(false, function_name));
+    activation_functions.emplace_back(false, function_name);
     cvm::log(std::string{"The activation function for layer["} + cvm::to_str(num_activation_functions + 1) + std::string{"] is "} + function_name + '\n');
     ++num_activation_functions;
 #ifdef LEPTON
   } else if (key_lookup(conf, lookup_key_custom.c_str())) {
     std::string function_expression;
     get_keyval(conf, lookup_key_custom.c_str(), function_expression, std::string(""));
-    activation_functions.push_back(std::make_pair(true, function_expression));
+    activation_functions.emplace_back(true, function_expression);
     cvm::log(std::string{"The custom activation function for layer["} + cvm::to_str(num_activation_functions + 1) + std::string{"] is "} + function_expression + '\n');
     ++num_activation_functions;
 #endif
