1 : // Copyright 2012 Google Inc. All Rights Reserved.
2 : //
3 : // Licensed under the Apache License, Version 2.0 (the "License");
4 : // you may not use this file except in compliance with the License.
5 : // You may obtain a copy of the License at
6 : //
7 : // http://www.apache.org/licenses/LICENSE-2.0
8 : //
9 : // Unless required by applicable law or agreed to in writing, software
10 : // distributed under the License is distributed on an "AS IS" BASIS,
11 : // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : // See the License for the specific language governing permissions and
13 : // limitations under the License.
14 :
15 : #include "syzygy/reorder/basic_block_optimizer.h"
16 :
17 : #include "gmock/gmock.h"
18 : #include "gtest/gtest.h"
19 : #include "syzygy/block_graph/basic_block_test_util.h"
20 : #include "syzygy/block_graph/block_graph.h"
21 : #include "syzygy/core/unittest_util.h"
22 : #include "syzygy/pe/block_util.h"
23 : #include "syzygy/pe/pe_utils.h"
24 : #include "syzygy/reorder/order_generator_test.h"
25 :
26 : #include "mnemonics.h" // NOLINT
27 :
28 : namespace reorder {
29 : namespace {
30 :
31 : using block_graph::BasicBlock;
32 : using block_graph::BasicCodeBlock;
33 : using block_graph::BasicDataBlock;
34 : using block_graph::BlockGraph;
35 : using core::RelativeAddress;
36 : using grinder::basic_block_util::EntryCountType;
37 : using grinder::basic_block_util::EntryCountMap;
38 : using grinder::basic_block_util::ModuleEntryCountMap;
39 : using grinder::basic_block_util::LoadBasicBlockRanges;
40 : using grinder::basic_block_util::RelativeAddressRange;
41 : using grinder::basic_block_util::RelativeAddressRangeVector;
42 : using pe::ImageLayout;
43 : using testing::GetExeTestDataRelativePath;
44 :
45 : typedef Reorderer::Order Order;
46 :
// Exposes the protected internals of BasicBlockOptimizer::BasicBlockOrderer
// so the tests below can exercise them directly.
class TestBasicBlockOrderer : public BasicBlockOptimizer::BasicBlockOrderer {
 public:
  // Publish the protected orderer primitives for direct testing.
  using BasicBlockOptimizer::BasicBlockOrderer::GetBasicBlockEntryCount;
  using BasicBlockOptimizer::BasicBlockOrderer::GetWarmestSuccessor;
  using BasicBlockOptimizer::BasicBlockOrderer::GetSortedJumpTargets;
  using BasicBlockOptimizer::BasicBlockOrderer::AddRecursiveDataReferences;
  using BasicBlockOptimizer::BasicBlockOrderer::AddWarmDataReferences;

  // Forwards directly to the base-class constructor.
  TestBasicBlockOrderer(
      const BasicBlockSubGraph& subgraph,
      const RelativeAddress& addr,
      Size size,
      const EntryCountMap& entry_counts)
          : BasicBlockOptimizer::BasicBlockOrderer(
              subgraph, addr, size, entry_counts) {
  }
};
64 :
65 : class BasicBlockOrdererTest : public testing::BasicBlockTest {
66 : public:
67 E : virtual void SetUp() OVERRIDE {
68 E : ASSERT_NO_FATAL_FAILURE(testing::BasicBlockTest::SetUp());
69 E : ASSERT_NO_FATAL_FAILURE(InitBlockGraph());
70 E : ASSERT_NO_FATAL_FAILURE(InitBasicBlockSubGraph());
71 E : ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 0, 0, 0, 0, 0));
72 : orderer_.reset(new TestBasicBlockOrderer(subgraph_,
73 : start_addr_,
74 : assembly_func_->size(),
75 E : entry_counts_));
76 E : ASSERT_TRUE(orderer_.get() != NULL);
77 E : }
78 :
79 : RelativeAddressRange MakeRange(BlockGraph::Offset offset,
80 : BlockGraph::Size size) {
81 : return RelativeAddressRange(start_addr_ + offset, size);
82 : }
83 :
84 E : BasicBlock* FindBasicBlockAt(BlockGraph::Offset offset) {
85 : typedef BasicBlockSubGraph::BBCollection BBCollection;
86 : BasicBlockSubGraph::BBCollection::iterator it =
87 E : subgraph_.basic_blocks().begin();
88 E : for (; it != subgraph_.basic_blocks().end(); ++it) {
89 E : if ((*it)->offset() == offset)
90 E : return *it;
91 E : }
92 i : return NULL;
93 E : }
94 :
95 : void SetEntryCounts(uint32 bb0, uint32 bb1, uint32 bb2, uint32 bb3,
96 E : uint32 bb4, uint32 bb5, uint32 bb6, uint32 bb7) {
97 E : entry_counts_.clear();
98 :
99 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[0]] = bb0;
100 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[1]] = bb1;
101 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[2]] = bb2;
102 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[3]] = bb3;
103 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[4]] = bb4;
104 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[5]] = bb5;
105 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[6]] = bb6;
106 E : entry_counts_[start_addr_.value() + kBasicBlockOffsets[7]] = bb7;
107 E : ASSERT_EQ(kNumCodeBasicBlocks, entry_counts_.size());
108 E : }
109 :
110 : static const size_t kBasicBlockOffsets[kNumCodeBasicBlocks];
111 :
112 : EntryCountMap entry_counts_;
113 : scoped_ptr<TestBasicBlockOrderer> orderer_;
114 : };
115 :
// Offsets of the code basic blocks within the test assembly function
// (presumably laid out in basic_block_test_util.h -- confirm). The data
// basic blocks (jump table at 52, case table at 64) are not listed here.
const size_t BasicBlockOrdererTest::kBasicBlockOffsets[kNumCodeBasicBlocks] =
    { 0, 23, 24, 31, 36, 37, 42, 49 };
118 :
119 : class BasicBlockOptimizerTest : public testing::OrderGeneratorTest {
120 : public:
121 : typedef testing::OrderGeneratorTest Super;
122 :
123 : BasicBlockOptimizerTest()
124 : : num_decomposable_blocks_(0),
125 : num_non_decomposable_blocks_(0),
126 E : num_non_code_blocks_(0) {
127 E : }
128 :
129 E : virtual void SetUp() OVERRIDE {
130 E : ASSERT_NO_FATAL_FAILURE(Super::SetUp());
131 E : ASSERT_NO_FATAL_FAILURE(InitBlockCounts());
132 : base::FilePath pdb_path(GetExeTestDataRelativePath(
133 E : testing::kBBEntryInstrumentedTestDllPdbName));
134 E : }
135 :
136 E : void InitBlockCounts() {
137 E : ASSERT_EQ(0U, num_decomposable_blocks_);
138 E : ASSERT_EQ(0U, num_non_decomposable_blocks_);
139 E : ASSERT_EQ(0U, num_non_code_blocks_);
140 :
141 E : for (size_t i = 0; i < image_layout_.sections.size(); ++i) {
142 E : const ImageLayout::SectionInfo& section_info = image_layout_.sections[i];
143 : BlockGraph::AddressSpace::RangeMapConstIterPair ip =
144 : image_layout_.blocks.GetIntersectingBlocks(section_info.addr,
145 E : section_info.size);
146 E : for (; ip.first != ip.second; ++ip.first) {
147 E : const BlockGraph::Block* block = ip.first->second;
148 E : if (block->type() != BlockGraph::CODE_BLOCK) {
149 E : ++num_non_code_blocks_;
150 E : } else if (pe::CodeBlockIsBasicBlockDecomposable(block)) {
151 E : ++num_decomposable_blocks_;
152 E : } else {
153 E : ++num_non_decomposable_blocks_;
154 : }
155 E : }
156 E : }
157 E : }
158 :
159 : bool FindBlockByName(const base::StringPiece& name,
160 : const BlockGraph::Block** block,
161 E : BlockGraph::AddressSpace::Range* range) {
162 E : DCHECK(block != NULL);
163 E : DCHECK(range != NULL);
164 : BlockGraph::AddressSpace::RangeMapConstIter it =
165 E : image_layout_.blocks.begin();
166 E : for (; it != image_layout_.blocks.end(); ++it) {
167 E : if (it->second->name() == name) {
168 E : *range = it->first;
169 E : *block = it->second;
170 E : return true;
171 : }
172 E : }
173 i : return false;
174 E : }
175 :
176 : protected:
177 : BasicBlockOptimizer optimizer_;
178 : size_t num_decomposable_blocks_;
179 : size_t num_non_decomposable_blocks_;
180 : size_t num_non_code_blocks_;
181 : };
182 :
183 : } // namespace
184 :
185 E : TEST_F(BasicBlockOrdererTest, GetBlockEntryCount) {
186 E : ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 0, 0, 0));
187 E : EntryCountType entry_count = 0;
188 E : EXPECT_TRUE(orderer_->GetBlockEntryCount(&entry_count));
189 E : EXPECT_EQ(1U, entry_count);
190 :
191 E : ASSERT_NO_FATAL_FAILURE(SetEntryCounts(17, 0, 1, 5, 1, 0, 0, 0));
192 E : EXPECT_TRUE(orderer_->GetBlockEntryCount(&entry_count));
193 E : EXPECT_EQ(17U, entry_count);
194 E : }
195 :
// Exercises GetWarmestSuccessor() via the two-successor basic block at
// offset 31 (entry-count slot bb3, |sub|); its fall-through is the block at
// offset 36 (slot bb4, |ret|), and the assertions below show its branch-taken
// successor resolves back to |sub| itself.
TEST_F(BasicBlockOrdererTest, GetWarmestSuccessor) {
  const BasicCodeBlock* sub = BasicCodeBlock::Cast(FindBasicBlockAt(31));
  ASSERT_TRUE(sub != NULL);

  const BasicCodeBlock* ret = BasicCodeBlock::Cast(FindBasicBlockAt(36));
  ASSERT_TRUE(ret != NULL);

  TestBasicBlockOrderer::BasicBlockSet placed_bbs;
  const BasicBlock* succ = NULL;

  // Make the fall-through as the warmest successor.
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(ret, succ);

  // Make the branch taken as the warmest successor.
  succ = NULL;
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 10, 5, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(sub, succ);

  // Give both branches the same warmth. Should preserve ordering.
  succ = NULL;
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 10, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(ret, succ);

  // Let the warmest branch already be placed, should return the other branch.
  succ = NULL;
  placed_bbs.insert(ret);
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(sub, succ);

  // Let both branches already be placed; no successor remains.
  // Note that we initialize succ to non NULL to verify that it becomes NULL.
  succ = sub;
  placed_bbs.insert(sub);
  placed_bbs.insert(ret);
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(NULL, succ);
}
239 :
// AddWarmDataReferences() should pull in the data basic blocks reachable
// from a code basic block: the switch head references both the jump table
// and (transitively) the case table, while a plain case body references
// neither.
TEST_F(BasicBlockOrdererTest, AddWarmDataReferences) {
  // Get basic block pointers to the switch, jump table, and case table.
  const BasicCodeBlock* code_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  const BasicDataBlock* jump_table = BasicDataBlock::Cast(FindBasicBlockAt(52));
  const BasicDataBlock* case_table = BasicDataBlock::Cast(FindBasicBlockAt(64));
  ASSERT_TRUE(code_bb != NULL);
  ASSERT_TRUE(jump_table != NULL);
  ASSERT_TRUE(case_table != NULL);

  // Capture the references from the switch basic block (offset 0).
  TestBasicBlockOrderer::BasicBlockSet references;
  ASSERT_TRUE(orderer_->AddWarmDataReferences(code_bb, &references));
  EXPECT_EQ(2U, references.size());
  EXPECT_EQ(1U, references.count(jump_table));
  EXPECT_EQ(1U, references.count(case_table));

  // Capture the references from the case_0 basic block (offset 24).
  references.clear();
  code_bb = BasicCodeBlock::Cast(FindBasicBlockAt(24));
  ASSERT_TRUE(orderer_->AddWarmDataReferences(code_bb, &references));
  EXPECT_TRUE(references.empty());
}
262 :
// The jump targets of the jump-table jmp that closes the first basic block
// should come back sorted by descending entry count: offset 42 (count 3),
// then 24 (count 2), then 37 (count 1).
TEST_F(BasicBlockOrdererTest, GetSortedJumpTargets) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 2, 0, 0, 1, 3, 0));
  const BasicCodeBlock* first_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  // The first block ends in an explicit jmp instruction, not successors.
  ASSERT_TRUE(first_bb->successors().empty());
  ASSERT_TRUE(!first_bb->instructions().empty());
  const block_graph::Instruction& jmp_inst = first_bb->instructions().back();
  ASSERT_EQ(I_JMP, jmp_inst.representation().opcode);
  // NOTE(review): verbose logging is turned on here, presumably to exercise
  // the logging path inside GetSortedJumpTargets -- confirm.
  logging::SetMinLogLevel(-1);
  std::vector<const BasicCodeBlock*> targets;
  ASSERT_TRUE(orderer_->GetSortedJumpTargets(jmp_inst, &targets));
  ASSERT_THAT(targets,
              testing::ElementsAre(
                  BasicCodeBlock::Cast(FindBasicBlockAt(42)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(24)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(37))));
}
279 :
// Ties in entry counts should be broken stably: with counts 24->1, 37->2,
// 42->1, the warmest target (37) comes first and the tied targets (24, 42)
// keep their original relative order.
TEST_F(BasicBlockOrdererTest, GetStableSortedJumpTargets) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 1, 0, 0, 2, 1, 0));
  const BasicCodeBlock* first_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  // The first block ends in an explicit jmp instruction, not successors.
  ASSERT_TRUE(first_bb->successors().empty());
  ASSERT_TRUE(!first_bb->instructions().empty());
  const block_graph::Instruction& jmp_inst = first_bb->instructions().back();
  ASSERT_EQ(I_JMP, jmp_inst.representation().opcode);
  // NOTE(review): verbose logging is turned on here, presumably to exercise
  // the logging path inside GetSortedJumpTargets -- confirm.
  logging::SetMinLogLevel(-1);
  std::vector<const BasicCodeBlock*> targets;
  ASSERT_TRUE(orderer_->GetSortedJumpTargets(jmp_inst, &targets));
  ASSERT_THAT(targets,
              testing::ElementsAre(
                  BasicCodeBlock::Cast(FindBasicBlockAt(37)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(24)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(42))));
}
296 :
297 E : TEST_F(BasicBlockOrdererTest, HotColdSeparation) {
298 E : ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 0, 0, 0));
299 E : Order::OffsetVector warm;
300 E : Order::OffsetVector cold;
301 E : ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
302 : // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
303 E : EXPECT_THAT(warm, testing::ElementsAre(0, 24, 31, 36, 52, 64));
304 E : EXPECT_THAT(cold, testing::ElementsAre(23, 37, 42, 49));
305 E : }
306 :
// Verifies that a hot conditional-branch target is hoisted to follow the
// branching basic block in the warm ordering.
TEST_F(BasicBlockOrdererTest, PathStraightening) {
  // The default control flow of the block we construct isn't very interesting
  // from a path straightening perspective. So, we modify it here such that the
  // jnz instruction the end of the basic block at offset 31 branches to case_1
  // (at offset 37), and then give that basic-block an elevated entry count.
  BasicCodeBlock* case_1 = BasicCodeBlock::Cast(FindBasicBlockAt(37));
  ASSERT_TRUE(case_1 != NULL);
  ASSERT_EQ(1U, case_1->instructions().size());
  ASSERT_EQ(I_CALL, case_1->instructions().front().representation().opcode);

  BasicCodeBlock* jnz_bb = BasicCodeBlock::Cast(FindBasicBlockAt(31));
  ASSERT_TRUE(jnz_bb != NULL);
  ASSERT_EQ(1U, jnz_bb->instructions().size());
  ASSERT_EQ(I_SUB, jnz_bb->instructions().front().representation().opcode);
  ASSERT_EQ(2U, jnz_bb->successors().size());
  ASSERT_EQ(block_graph::Successor::kConditionNotEqual,
            jnz_bb->successors().front().condition());
  // Redirect the taken (not-equal) branch to case_1.
  jnz_bb->successors().front().set_reference(
      block_graph::BasicBlockReference(BlockGraph::PC_RELATIVE_REF, 1, case_1));

  // Setup the entry counts such that the jump table stays in the same order
  // but case 1 is promoted to follow the jnz basic block.
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 10, 5, 1, 7, 0, 0));
  Order::OffsetVector warm;
  Order::OffsetVector cold;
  ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
  // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
  // Case_1 (offset 37) now directly follows the jnz block (offset 31).
  EXPECT_THAT(warm, testing::ElementsAre(0, 24, 31, 37, 36, 52, 64));
  EXPECT_THAT(cold, testing::ElementsAre(42, 23, 49));
}
337 :
338 E : TEST_F(BasicBlockOrdererTest, PathStraighteningAcrossJumpTable) {
339 : // Setup the entry counts such that case 1 (at offset 37) is promoted to be
340 : // the warmest path through the jump table.
341 E : ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 7, 0, 0));
342 E : Order::OffsetVector warm;
343 E : Order::OffsetVector cold;
344 E : ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
345 : // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
346 E : EXPECT_THAT(warm, testing::ElementsAre(0, 37, 24, 31, 36, 52, 64));
347 E : EXPECT_THAT(cold, testing::ElementsAre(42, 23, 49));
348 E : }
349 :
350 E : TEST_F(BasicBlockOptimizerTest, Accessors) {
351 E : const std::string kSectionName(".froboz");
352 E : EXPECT_TRUE(!optimizer_.cold_section_name().empty());
353 E : EXPECT_NE(kSectionName, optimizer_.cold_section_name());
354 E : optimizer_.set_cold_section_name(kSectionName);
355 E : EXPECT_EQ(kSectionName, optimizer_.cold_section_name());
356 E : }
357 :
// With an empty entry-count map and an empty input ordering, every code
// block should be moved to the (new, code-characteristic) cold section,
// leaving only non-code blocks in the original sections.
TEST_F(BasicBlockOptimizerTest, EmptyOrderingAllCold) {
  Order order;
  EntryCountMap entry_counts;
  ASSERT_TRUE(
      optimizer_.Optimize(image_layout_, entry_counts, &order));

  // Exactly one new section (the cold section) should have been appended.
  EXPECT_EQ(image_layout_.sections.size() + 1, order.sections.size());
  EXPECT_EQ(optimizer_.cold_section_name(), order.sections.back().name);
  EXPECT_EQ(Order::SectionSpec::kNewSectionId, order.sections.back().id);
  EXPECT_EQ(pe::kCodeCharacteristics, order.sections.back().characteristics);

  // Count the blocks left in the original sections. This should only include
  // non-code blocks.
  size_t num_non_code_blocks = 0;
  size_t num_non_decomposable_blocks = 0;
  for (size_t i = 0; i < image_layout_.sections.size(); ++i) {
    for (size_t k = 0; k < order.sections[i].blocks.size(); ++k) {
      const BlockGraph::Block* block = order.sections[i].blocks[k].block;
      ASSERT_TRUE(block != NULL);
      ASSERT_NE(BlockGraph::CODE_BLOCK, block->type());
      ++num_non_code_blocks;
    }
  }

  // Validate that we have the expected numbers of blocks.
  EXPECT_EQ(num_non_code_blocks_, num_non_code_blocks);
  EXPECT_EQ(num_decomposable_blocks_ + num_non_decomposable_blocks_,
            order.sections.back().blocks.size());
  // With no entry counts, no block should carry a basic-block ordering.
  for (size_t i = 0; i < order.sections.back().blocks.size(); ++i) {
    EXPECT_TRUE(order.sections.back().blocks[i].basic_block_offsets.empty());
  }
}
390 :
TEST_F(BasicBlockOptimizerTest, HotCold) {
  // This test does a simple manipulation of the entry counts for DllMain and
  // validates that some minimum number of its blocks get moved into the cold
  // section. We defer to the BasicBlockOrdererTest instances above for the
  // details Hot/Cold and Path Straightening tests.
  const BlockGraph::Block* dllmain = NULL;
  BlockGraph::AddressSpace::Range range;
  ASSERT_TRUE(FindBlockByName("DllMain", &dllmain, &range));
  ASSERT_TRUE(dllmain != NULL);

  using block_graph::BasicBlockSubGraph;
  using block_graph::BasicBlockDecomposer;

  // Decompose DllMain into its basic blocks.
  BasicBlockSubGraph subgraph;
  BasicBlockDecomposer decomposer(dllmain, &subgraph);
  ASSERT_TRUE(decomposer.Decompose());
  ASSERT_EQ(1U, subgraph.block_descriptions().size());

  // Generate an entry count map with a non-zero count for every other BB.
  EntryCountMap entry_counts;
  const BasicBlockSubGraph::BlockDescription& desc =
      subgraph.block_descriptions().front();
  BasicBlockSubGraph::BasicBlockOrdering::const_iterator it(
      desc.basic_block_order.begin());
  size_t num_basic_blocks = desc.basic_block_order.size();
  size_t num_hot_blocks = 0;

  bool is_hot = true;
  // Entry counts are keyed by absolute address: block start plus BB offset.
  BlockGraph::Offset start_offs = subgraph.original_block()->addr().value();
  for (; it != desc.basic_block_order.end(); ++it) {
    // Only code basic blocks get entry counts; data blocks are skipped.
    if (is_hot && BasicCodeBlock::Cast(*it) != NULL) {

      entry_counts[start_offs + (*it)->offset()] = 1;
      ++num_hot_blocks;
    }

    // Toggle hotness for next block.
    is_hot = !is_hot;
  }

  // Create an ordering that moves dllmain to a new section.
  std::string section_name(".dllmain");
  Order order;
  order.sections.resize(1);
  order.sections[0].id = Order::SectionSpec::kNewSectionId;
  order.sections[0].name = section_name;
  order.sections[0].characteristics = pe::kCodeCharacteristics;
  order.sections[0].blocks.push_back(Order::BlockSpec(dllmain));

  ASSERT_TRUE(
      optimizer_.Optimize(image_layout_, entry_counts, &order));

  // Two sections are appended: the explicit ".dllmain" section is kept at
  // index 0 and the cold section is appended last; DllMain appears in both,
  // split between its hot and cold basic blocks.
  ASSERT_EQ(image_layout_.sections.size() + 2, order.sections.size());
  ASSERT_EQ(section_name, order.sections[0].name);
  ASSERT_EQ(1U, order.sections[0].blocks.size());
  ASSERT_TRUE(!order.sections.back().blocks.empty());
  ASSERT_EQ(dllmain, order.sections[0].blocks[0].block);
  ASSERT_EQ(dllmain, order.sections.back().blocks[0].block);
  ASSERT_LE(num_hot_blocks,
            order.sections[0].blocks[0].basic_block_offsets.size());

  // Since data BBs that are referred to by 'hot' code BBs also make
  // it into the hot BB list, there could be fewer cold blocks than expected.
  ASSERT_GE(num_basic_blocks - num_hot_blocks,
            order.sections.back().blocks[0].basic_block_offsets.size());
}
457 :
458 : } // namespace reorder
|