From c456446aace2b9eca1ed17e78ae3dd8e623be28f Mon Sep 17 00:00:00 2001 From: Arseniy Obolenskiy Date: Sat, 7 Mar 2026 18:40:30 +0100 Subject: [PATCH] Address 'google-*' clang-tidy remarks --- .clang-tidy | 5 ++--- app/Graph/build.cpp | 6 ++++-- include/layers/InputLayer.hpp | 25 +++++++++---------------- src/Weights_Reader/reader_weights.cpp | 3 ++- src/layers_oneDNN/ConvLayer.cpp | 6 ++++-- 5 files changed, 21 insertions(+), 24 deletions(-) diff --git a/.clang-tidy b/.clang-tidy index 6cd376ba..d36779f1 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -1,13 +1,12 @@ Checks: > bugprone-*, + google-*, modernize-*, misc-*, performance-*, portability-*, readability-*, - -google-readability-braces-around-statements, - -google-readability-namespace-comments, - -google-runtime-references, + -google-build-using-namespace, -misc-non-private-member-variables-in-classes, -misc-const-correctness, -misc-include-cleaner, diff --git a/app/Graph/build.cpp b/app/Graph/build.cpp index 381e8fcd..4054bf79 100644 --- a/app/Graph/build.cpp +++ b/app/Graph/build.cpp @@ -138,16 +138,18 @@ void build_graph_linear(it_lab_ai::Graph& graph, it_lab_ai::Tensor& input, if (comments) std::cout << "Input set in graph." << '\n'; graph.makeConnection(a1, layers[0]); - if (comments) + if (comments) { std::cout << "Connection made between InputLayer and first layer." 
<< '\n'; + } for (size_t i = 0; i < layers.size() - 1; ++i) { if (layerpostop[i]) { layers[i - 1]->postops.layers.push_back(layers[i]); layers[i - 1]->postops.count++; graph.makeConnection(layers[i - 1], layers[i + 1]); - } else if (!layerpostop[i + 1]) + } else if (!layerpostop[i + 1]) { graph.makeConnection(layers[i], layers[i + 1]); + } } graph.setOutput(layers.back(), output); diff --git a/include/layers/InputLayer.hpp b/include/layers/InputLayer.hpp index 0bbd36ec..e0de8e6e 100644 --- a/include/layers/InputLayer.hpp +++ b/include/layers/InputLayer.hpp @@ -1,6 +1,7 @@ #pragma once #include #include +#include #include "layers/Layer.hpp" @@ -76,10 +77,8 @@ class InputLayer : public Layer { } } } - Shape sh1({static_cast<size_t>(n), - static_cast<size_t>(h), - static_cast<size_t>(w), - static_cast<size_t>(c)}); + Shape sh1({static_cast<size_t>(n), static_cast<size_t>(h), + static_cast<size_t>(w), static_cast<size_t>(c)}); output[0] = make_tensor(res, sh1); break; } @@ -103,10 +102,8 @@ } } } - Shape sh1({static_cast<size_t>(n), - static_cast<size_t>(c), - static_cast<size_t>(h), - static_cast<size_t>(w)}); + Shape sh1({static_cast<size_t>(n), static_cast<size_t>(c), + static_cast<size_t>(h), static_cast<size_t>(w)}); output[0] = make_tensor(res, sh1); break; } @@ -144,10 +141,8 @@ } } } - Shape sh1({static_cast<size_t>(n), - static_cast<size_t>(h), - static_cast<size_t>(w), - static_cast<size_t>(c)}); + Shape sh1({static_cast<size_t>(n), static_cast<size_t>(h), + static_cast<size_t>(w), static_cast<size_t>(c)}); output[0] = make_tensor(res, sh1); break; } @@ -171,10 +166,8 @@ } } } - Shape sh1({static_cast<size_t>(n), - static_cast<size_t>(c), - static_cast<size_t>(h), - static_cast<size_t>(w)}); + Shape sh1({static_cast<size_t>(n), static_cast<size_t>(c), + static_cast<size_t>(h), static_cast<size_t>(w)}); output[0] = make_tensor(res, sh1); break; } diff --git a/src/Weights_Reader/reader_weights.cpp b/src/Weights_Reader/reader_weights.cpp index b84fdaa1..47b4503b 100644 --- a/src/Weights_Reader/reader_weights.cpp +++ b/src/Weights_Reader/reader_weights.cpp @@ -57,7 +57,8 @@ json read_json(const std::string&
filename) { return json{}; } - char* data = (char*)mmap(nullptr, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0); + char* data = static_cast<char*>( + mmap(nullptr, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0)); json result = json::parse(data, data + sb.st_size); munmap(data, sb.st_size); diff --git a/src/layers_oneDNN/ConvLayer.cpp b/src/layers_oneDNN/ConvLayer.cpp index 1c2985b2..b56e4f2e 100644 --- a/src/layers_oneDNN/ConvLayer.cpp +++ b/src/layers_oneDNN/ConvLayer.cpp @@ -249,12 +249,14 @@ void ConvLayerOneDnn::initialize_convolution(const Shape& input_shape, src_memory_ = dnnl::memory(conv_pd.src_desc(), *engine_); weights_memory_ = dnnl::memory(conv_pd.weights_desc(), *engine_); dst_memory_ = dnnl::memory(conv_pd.dst_desc(), *engine_); - if (!bias_->empty()) + if (!bias_->empty()) { bias_memory_ = dnnl::memory(conv_pd.bias_desc(), *engine_); + } fill_memory_with_tensor(weights_memory_, *kernel_, data_type); - if (!bias_->empty()) + if (!bias_->empty()) { fill_memory_with_tensor(bias_memory_, *bias_, data_type); + } conv_prim_ = std::make_unique<dnnl::convolution_forward>(conv_pd); initialized_ = true;