Skip to content

Commit c456446

Browse files
committed
Address 'google-*' clang-tidy remarks
1 parent 028996d commit c456446

5 files changed

Lines changed: 21 additions & 24 deletions

File tree

.clang-tidy

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
11
Checks: >
22
bugprone-*,
3+
google-*,
34
modernize-*,
45
misc-*,
56
performance-*,
67
portability-*,
78
readability-*,
8-
-google-readability-braces-around-statements,
9-
-google-readability-namespace-comments,
10-
-google-runtime-references,
9+
-google-build-using-namespace,
1110
-misc-non-private-member-variables-in-classes,
1211
-misc-const-correctness,
1312
-misc-include-cleaner,

app/Graph/build.cpp

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -138,16 +138,18 @@ void build_graph_linear(it_lab_ai::Graph& graph, it_lab_ai::Tensor& input,
138138
if (comments) std::cout << "Input set in graph." << '\n';
139139

140140
graph.makeConnection(a1, layers[0]);
141-
if (comments)
141+
if (comments) {
142142
std::cout << "Connection made between InputLayer and first layer." << '\n';
143+
}
143144

144145
for (size_t i = 0; i < layers.size() - 1; ++i) {
145146
if (layerpostop[i]) {
146147
layers[i - 1]->postops.layers.push_back(layers[i]);
147148
layers[i - 1]->postops.count++;
148149
graph.makeConnection(layers[i - 1], layers[i + 1]);
149-
} else if (!layerpostop[i + 1])
150+
} else if (!layerpostop[i + 1]) {
150151
graph.makeConnection(layers[i], layers[i + 1]);
152+
}
151153
}
152154

153155
graph.setOutput(layers.back(), output);

include/layers/InputLayer.hpp

Lines changed: 9 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
#pragma once
22
#include <algorithm>
33
#include <cmath>
4+
#include <cstdint>
45

56
#include "layers/Layer.hpp"
67

@@ -76,10 +77,8 @@ class InputLayer : public Layer {
7677
}
7778
}
7879
}
79-
Shape sh1({static_cast<unsigned long long>(n),
80-
static_cast<unsigned long long>(h),
81-
static_cast<unsigned long long>(w),
82-
static_cast<unsigned long long>(c)});
80+
Shape sh1({static_cast<uint64_t>(n), static_cast<uint64_t>(h),
81+
static_cast<uint64_t>(w), static_cast<uint64_t>(c)});
8382
output[0] = make_tensor<int>(res, sh1);
8483
break;
8584
}
@@ -103,10 +102,8 @@ class InputLayer : public Layer {
103102
}
104103
}
105104
}
106-
Shape sh1({static_cast<unsigned long long>(n),
107-
static_cast<unsigned long long>(c),
108-
static_cast<unsigned long long>(h),
109-
static_cast<unsigned long long>(w)});
105+
Shape sh1({static_cast<uint64_t>(n), static_cast<uint64_t>(c),
106+
static_cast<uint64_t>(h), static_cast<uint64_t>(w)});
110107
output[0] = make_tensor<int>(res, sh1);
111108
break;
112109
}
@@ -144,10 +141,8 @@ class InputLayer : public Layer {
144141
}
145142
}
146143
}
147-
Shape sh1({static_cast<unsigned long long>(n),
148-
static_cast<unsigned long long>(h),
149-
static_cast<unsigned long long>(w),
150-
static_cast<unsigned long long>(c)});
144+
Shape sh1({static_cast<uint64_t>(n), static_cast<uint64_t>(h),
145+
static_cast<uint64_t>(w), static_cast<uint64_t>(c)});
151146
output[0] = make_tensor<float>(res, sh1);
152147
break;
153148
}
@@ -171,10 +166,8 @@ class InputLayer : public Layer {
171166
}
172167
}
173168
}
174-
Shape sh1({static_cast<unsigned long long>(n),
175-
static_cast<unsigned long long>(c),
176-
static_cast<unsigned long long>(h),
177-
static_cast<unsigned long long>(w)});
169+
Shape sh1({static_cast<uint64_t>(n), static_cast<uint64_t>(c),
170+
static_cast<uint64_t>(h), static_cast<uint64_t>(w)});
178171
output[0] = make_tensor<float>(res, sh1);
179172
break;
180173
}

src/Weights_Reader/reader_weights.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,8 @@ json read_json(const std::string& filename) {
5757
return json{};
5858
}
5959

60-
char* data = (char*)mmap(nullptr, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
60+
char* data = static_cast<char*>(
61+
mmap(nullptr, sb.st_size, PROT_READ, MAP_PRIVATE, fd, 0));
6162
json result = json::parse(data, data + sb.st_size);
6263

6364
munmap(data, sb.st_size);

src/layers_oneDNN/ConvLayer.cpp

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -249,12 +249,14 @@ void ConvLayerOneDnn::initialize_convolution(const Shape& input_shape,
249249
src_memory_ = dnnl::memory(conv_pd.src_desc(), *engine_);
250250
weights_memory_ = dnnl::memory(conv_pd.weights_desc(), *engine_);
251251
dst_memory_ = dnnl::memory(conv_pd.dst_desc(), *engine_);
252-
if (!bias_->empty())
252+
if (!bias_->empty()) {
253253
bias_memory_ = dnnl::memory(conv_pd.bias_desc(), *engine_);
254+
}
254255

255256
fill_memory_with_tensor(weights_memory_, *kernel_, data_type);
256-
if (!bias_->empty())
257+
if (!bias_->empty()) {
257258
fill_memory_with_tensor(bias_memory_, *bias_, data_type);
259+
}
258260

259261
conv_prim_ = std::make_unique<dnnl::convolution_forward>(conv_pd);
260262
initialized_ = true;

0 commit comments

Comments (0)