// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "syzygy/reorder/basic_block_optimizer.h"

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "syzygy/block_graph/basic_block_test_util.h"
#include "syzygy/block_graph/block_graph.h"
#include "syzygy/core/unittest_util.h"
#include "syzygy/pe/pe_utils.h"
#include "syzygy/reorder/order_generator_test.h"

#include "mnemonics.h"  // NOLINT

namespace reorder {
namespace {

using block_graph::BasicBlock;
using block_graph::BasicCodeBlock;
using block_graph::BasicDataBlock;
using block_graph::BlockGraph;
using core::RelativeAddress;
using grinder::basic_block_util::EntryCountType;
using grinder::basic_block_util::IndexedFrequencyInformation;
using grinder::basic_block_util::IndexedFrequencyMap;
using grinder::basic_block_util::LoadBasicBlockRanges;
using grinder::basic_block_util::RelativeAddressRange;
using grinder::basic_block_util::RelativeAddressRangeVector;
using pe::ImageLayout;
using testing::GetExeTestDataRelativePath;

typedef Reorderer::Order Order;

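// Exposes the protected members of BasicBlockOptimizer::BasicBlockOrderer so
// that the tests below can exercise them directly.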
class TestBasicBlockOrderer : public BasicBlockOptimizer::BasicBlockOrderer {
 public:
  using BasicBlockOptimizer::BasicBlockOrderer::GetBasicBlockEntryCount;
  using BasicBlockOptimizer::BasicBlockOrderer::GetWarmestSuccessor;
  using BasicBlockOptimizer::BasicBlockOrderer::GetSortedJumpTargets;
  using BasicBlockOptimizer::BasicBlockOrderer::AddRecursiveDataReferences;
  using BasicBlockOptimizer::BasicBlockOrderer::AddWarmDataReferences;

  TestBasicBlockOrderer(
      const BasicBlockSubGraph& subgraph,
      const RelativeAddress& addr,
      Size size,
      const IndexedFrequencyInformation& entry_counts)
      : BasicBlockOptimizer::BasicBlockOrderer(
            subgraph, addr, size, entry_counts) {
  }
};

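// Test fixture that decomposes the test assembly function into a basic-block
// subgraph and provides helpers for assigning per-basic-block entry counts.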
class BasicBlockOrdererTest : public testing::BasicBlockTest {
 public:
  virtual void SetUp() override {
    ASSERT_NO_FATAL_FAILURE(testing::BasicBlockTest::SetUp());
    ASSERT_NO_FATAL_FAILURE(InitBlockGraph());
    ASSERT_NO_FATAL_FAILURE(InitBasicBlockSubGraph());
    ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 0, 0, 0, 0, 0));
    orderer_.reset(new TestBasicBlockOrderer(subgraph_,
                                             start_addr_,
                                             assembly_func_->size(),
                                             entry_counts_));
    ASSERT_TRUE(orderer_.get() != NULL);
  }

  RelativeAddressRange MakeRange(BlockGraph::Offset offset,
                                 BlockGraph::Size size) {
    return RelativeAddressRange(start_addr_ + offset, size);
  }

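  // Returns the basic block in |subgraph_| that starts at |offset|, or NULL
  // if there is no such basic block.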
  BasicBlock* FindBasicBlockAt(BlockGraph::Offset offset) {
    typedef BasicBlockSubGraph::BBCollection BBCollection;
    BasicBlockSubGraph::BBCollection::iterator it =
        subgraph_.basic_blocks().begin();
    for (; it != subgraph_.basic_blocks().end(); ++it) {
      if ((*it)->offset() == offset)
        return *it;
    }
    return NULL;
  }

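  // Populates |entry_counts_| with one frequency value per code basic block,
  // keyed by the basic block's address (start_addr_ + offset).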
  void SetEntryCounts(uint32 bb0, uint32 bb1, uint32 bb2, uint32 bb3,
                      uint32 bb4, uint32 bb5, uint32 bb6, uint32 bb7) {
    entry_counts_.num_entries = kNumCodeBasicBlocks;
    entry_counts_.num_columns = 1;
    entry_counts_.data_type =
        ::common::IndexedFrequencyData::BASIC_BLOCK_ENTRY;
    entry_counts_.frequency_size = 4;

    IndexedFrequencyMap& frequency_map = entry_counts_.frequency_map;
    frequency_map.clear();

    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[0], 0)] = bb0;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[1], 0)] = bb1;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[2], 0)] = bb2;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[3], 0)] = bb3;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[4], 0)] = bb4;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[5], 0)] = bb5;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[6], 0)] = bb6;
    frequency_map[std::make_pair(start_addr_ + kBasicBlockOffsets[7], 0)] = bb7;
    ASSERT_EQ(kNumCodeBasicBlocks, frequency_map.size());
  }

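  // The offsets of the code basic blocks within the test assembly function.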
  static const size_t kBasicBlockOffsets[kNumCodeBasicBlocks];

  IndexedFrequencyInformation entry_counts_;
  scoped_ptr<TestBasicBlockOrderer> orderer_;
};

const size_t BasicBlockOrdererTest::kBasicBlockOffsets[kNumCodeBasicBlocks] =
    { 0, 23, 24, 31, 36, 37, 42, 49 };

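// Test fixture for BasicBlockOptimizer. It tallies the decomposable,
// non-decomposable and non-code blocks in the test image so that the tests
// below can validate the generated orderings against those counts.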
class BasicBlockOptimizerTest : public testing::OrderGeneratorTest {
 public:
  typedef testing::OrderGeneratorTest Super;

  BasicBlockOptimizerTest()
      : num_decomposable_blocks_(0),
        num_non_decomposable_blocks_(0),
        num_non_code_blocks_(0) {
  }

  virtual void SetUp() override {
    ASSERT_NO_FATAL_FAILURE(Super::SetUp());
    ASSERT_NO_FATAL_FAILURE(InitBlockCounts());
    base::FilePath pdb_path(GetExeTestDataRelativePath(
        testing::kBBEntryInstrumentedTestDllPdbName));
  }

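  // Counts the decomposable, non-decomposable and non-code blocks across all
  // sections of the test image layout.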
  void InitBlockCounts() {
    ASSERT_EQ(0U, num_decomposable_blocks_);
    ASSERT_EQ(0U, num_non_decomposable_blocks_);
    ASSERT_EQ(0U, num_non_code_blocks_);

    for (size_t i = 0; i < image_layout_.sections.size(); ++i) {
      const ImageLayout::SectionInfo& section_info = image_layout_.sections[i];
      BlockGraph::AddressSpace::RangeMapConstIterPair ip =
          image_layout_.blocks.GetIntersectingBlocks(section_info.addr,
                                                     section_info.size);
      for (; ip.first != ip.second; ++ip.first) {
        const BlockGraph::Block* block = ip.first->second;
        if (block->type() != BlockGraph::CODE_BLOCK) {
          ++num_non_code_blocks_;
        } else if (policy_.BlockIsSafeToBasicBlockDecompose(block)) {
          ++num_decomposable_blocks_;
        } else {
          ++num_non_decomposable_blocks_;
        }
      }
    }
  }

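  // Looks up the block named |name| in the image layout. On success returns
  // true and populates |block| and |range|.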
  bool FindBlockByName(const base::StringPiece& name,
                       const BlockGraph::Block** block,
                       BlockGraph::AddressSpace::Range* range) {
    DCHECK(block != NULL);
    DCHECK(range != NULL);
    BlockGraph::AddressSpace::RangeMapConstIter it =
        image_layout_.blocks.begin();
    for (; it != image_layout_.blocks.end(); ++it) {
      if (it->second->name() == name) {
        *range = it->first;
        *block = it->second;
        return true;
      }
    }
    return false;
  }

 protected:
  pe::PETransformPolicy policy_;
  BasicBlockOptimizer optimizer_;
  size_t num_decomposable_blocks_;
  size_t num_non_decomposable_blocks_;
  size_t num_non_code_blocks_;
};

}  // namespace

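// The entry count reported for the whole block should be the entry count of
// its first basic block (at offset 0).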
TEST_F(BasicBlockOrdererTest, GetBlockEntryCount) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 0, 0, 0));
  EntryCountType entry_count = 0;
  EXPECT_TRUE(orderer_->GetBlockEntryCount(&entry_count));
  EXPECT_EQ(1U, entry_count);

  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(17, 0, 1, 5, 1, 0, 0, 0));
  EXPECT_TRUE(orderer_->GetBlockEntryCount(&entry_count));
  EXPECT_EQ(17U, entry_count);
}

TEST_F(BasicBlockOrdererTest, GetWarmestSuccessor) {
  const BasicCodeBlock* sub = BasicCodeBlock::Cast(FindBasicBlockAt(31));
  ASSERT_TRUE(sub != NULL);

  const BasicCodeBlock* ret = BasicCodeBlock::Cast(FindBasicBlockAt(36));
  ASSERT_TRUE(ret != NULL);

  TestBasicBlockOrderer::BasicBlockSet placed_bbs;
  const BasicBlock* succ = NULL;

  // Make the fall-through the warmest successor.
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(ret, succ);

  // Make the taken branch the warmest successor.
  succ = NULL;
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 10, 5, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(sub, succ);

  // Give both branches the same warmth. This should preserve the original
  // ordering.
  succ = NULL;
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 10, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(ret, succ);

  // With the warmest branch already placed, the other branch should be
  // returned.
  succ = NULL;
  placed_bbs.insert(ret);
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(sub, succ);

  // With both branches already placed, no successor should be returned. Note
  // that we initialize succ to a non-NULL value to verify that it becomes
  // NULL.
  succ = sub;
  placed_bbs.insert(sub);
  placed_bbs.insert(ret);
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 0, 5, 10, 0, 0, 0));
  ASSERT_TRUE(orderer_->GetWarmestSuccessor(sub, placed_bbs, &succ));
  ASSERT_EQ(NULL, succ);
}

TEST_F(BasicBlockOrdererTest, AddWarmDataReferences) {
  // Get basic block pointers to the switch, jump table, and case table.
  const BasicCodeBlock* code_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  const BasicDataBlock* jump_table = BasicDataBlock::Cast(FindBasicBlockAt(52));
  const BasicDataBlock* case_table = BasicDataBlock::Cast(FindBasicBlockAt(64));
  ASSERT_TRUE(code_bb != NULL);
  ASSERT_TRUE(jump_table != NULL);
  ASSERT_TRUE(case_table != NULL);

  // Capture the references from the switch basic block (offset 0).
  TestBasicBlockOrderer::BasicBlockSet references;
  ASSERT_TRUE(orderer_->AddWarmDataReferences(code_bb, &references));
  EXPECT_EQ(2U, references.size());
  EXPECT_EQ(1U, references.count(jump_table));
  EXPECT_EQ(1U, references.count(case_table));

  // Capture the references from the case_0 basic block (offset 24).
  references.clear();
  code_bb = BasicCodeBlock::Cast(FindBasicBlockAt(24));
  ASSERT_TRUE(orderer_->AddWarmDataReferences(code_bb, &references));
  EXPECT_TRUE(references.empty());
}

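// The targets of the jump-table jump should be returned in order of
// decreasing entry count.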
TEST_F(BasicBlockOrdererTest, GetSortedJumpTargets) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 2, 0, 0, 1, 3, 0));
  const BasicCodeBlock* first_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  ASSERT_TRUE(first_bb->successors().empty());
  ASSERT_TRUE(!first_bb->instructions().empty());
  const block_graph::Instruction& jmp_inst = first_bb->instructions().back();
  ASSERT_EQ(I_JMP, jmp_inst.representation().opcode);
  logging::SetMinLogLevel(-1);
  std::vector<const BasicCodeBlock*> targets;
  ASSERT_TRUE(orderer_->GetSortedJumpTargets(jmp_inst, &targets));
  ASSERT_THAT(targets,
              testing::ElementsAre(
                  BasicCodeBlock::Cast(FindBasicBlockAt(42)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(24)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(37))));
}

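// The jump-target sort should be stable: targets with equal entry counts
// (offsets 24 and 42) keep their relative order.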
TEST_F(BasicBlockOrdererTest, GetStableSortedJumpTargets) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(0, 0, 1, 0, 0, 2, 1, 0));
  const BasicCodeBlock* first_bb = BasicCodeBlock::Cast(FindBasicBlockAt(0));
  ASSERT_TRUE(first_bb->successors().empty());
  ASSERT_TRUE(!first_bb->instructions().empty());
  const block_graph::Instruction& jmp_inst = first_bb->instructions().back();
  ASSERT_EQ(I_JMP, jmp_inst.representation().opcode);
  logging::SetMinLogLevel(-1);
  std::vector<const BasicCodeBlock*> targets;
  ASSERT_TRUE(orderer_->GetSortedJumpTargets(jmp_inst, &targets));
  ASSERT_THAT(targets,
              testing::ElementsAre(
                  BasicCodeBlock::Cast(FindBasicBlockAt(37)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(24)),
                  BasicCodeBlock::Cast(FindBasicBlockAt(42))));
}

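// Basic blocks with non-zero entry counts, along with the data they
// reference, should be ordered as warm; everything else should be cold.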
TEST_F(BasicBlockOrdererTest, HotColdSeparation) {
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 0, 0, 0));
  Order::OffsetVector warm;
  Order::OffsetVector cold;
  ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
  // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
  EXPECT_THAT(warm, testing::ElementsAre(0, 24, 31, 36, 52, 64));
  EXPECT_THAT(cold, testing::ElementsAre(23, 37, 42, 49));
}

TEST_F(BasicBlockOrdererTest, PathStraightening) {
  // The default control flow of the block we construct isn't very interesting
  // from a path-straightening perspective. So, we modify it here such that the
  // jnz instruction at the end of the basic block at offset 31 branches to
  // case_1 (at offset 37), and then give that basic block an elevated entry
  // count.
  BasicCodeBlock* case_1 = BasicCodeBlock::Cast(FindBasicBlockAt(37));
  ASSERT_TRUE(case_1 != NULL);
  ASSERT_EQ(1U, case_1->instructions().size());
  ASSERT_EQ(I_CALL, case_1->instructions().front().representation().opcode);

  BasicCodeBlock* jnz_bb = BasicCodeBlock::Cast(FindBasicBlockAt(31));
  ASSERT_TRUE(jnz_bb != NULL);
  ASSERT_EQ(1U, jnz_bb->instructions().size());
  ASSERT_EQ(I_SUB, jnz_bb->instructions().front().representation().opcode);
  ASSERT_EQ(2U, jnz_bb->successors().size());
  ASSERT_EQ(block_graph::Successor::kConditionNotEqual,
            jnz_bb->successors().front().condition());
  jnz_bb->successors().front().set_reference(
      block_graph::BasicBlockReference(BlockGraph::PC_RELATIVE_REF, 1, case_1));

  // Set up the entry counts such that the jump table stays in the same order
  // but case 1 is promoted to follow the jnz basic block.
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 10, 5, 1, 7, 0, 0));
  Order::OffsetVector warm;
  Order::OffsetVector cold;
  ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
  // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
  EXPECT_THAT(warm, testing::ElementsAre(0, 24, 31, 37, 36, 52, 64));
  EXPECT_THAT(cold, testing::ElementsAre(42, 23, 49));
}

TEST_F(BasicBlockOrdererTest, PathStraighteningAcrossJumpTable) {
  // Set up the entry counts such that case 1 (at offset 37) is promoted to be
  // the warmest path through the jump table.
  ASSERT_NO_FATAL_FAILURE(SetEntryCounts(1, 0, 1, 5, 1, 7, 0, 0));
  Order::OffsetVector warm;
  Order::OffsetVector cold;
  ASSERT_TRUE(orderer_->GetBasicBlockOrderings(&warm, &cold));
  // Note that the bb's at 52 and 64 are the jump and case tables, respectively.
  EXPECT_THAT(warm, testing::ElementsAre(0, 37, 24, 31, 36, 52, 64));
  EXPECT_THAT(cold, testing::ElementsAre(42, 23, 49));
}

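// Exercises the cold section name accessor and mutator.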
TEST_F(BasicBlockOptimizerTest, Accessors) {
  const std::string kSectionName(".froboz");
  EXPECT_TRUE(!optimizer_.cold_section_name().empty());
  EXPECT_NE(kSectionName, optimizer_.cold_section_name());
  optimizer_.set_cold_section_name(kSectionName);
  EXPECT_EQ(kSectionName, optimizer_.cold_section_name());
}

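// With no entry count data, every code block (decomposable or not) should be
// moved to the new cold section, leaving only non-code blocks in the original
// sections.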
TEST_F(BasicBlockOptimizerTest, EmptyOrderingAllCold) {
  Order order;
  IndexedFrequencyInformation entry_counts;
  entry_counts.num_entries = 0;
  entry_counts.num_columns = 1;
  entry_counts.data_type = ::common::IndexedFrequencyData::BASIC_BLOCK_ENTRY;
  entry_counts.frequency_size = 4;

  ASSERT_TRUE(
      optimizer_.Optimize(image_layout_, entry_counts, &order));

  EXPECT_EQ(image_layout_.sections.size() + 1, order.sections.size());
  EXPECT_EQ(optimizer_.cold_section_name(), order.sections.back().name);
  EXPECT_EQ(Order::SectionSpec::kNewSectionId, order.sections.back().id);
  EXPECT_EQ(pe::kCodeCharacteristics, order.sections.back().characteristics);

  // Count the blocks left in the original sections. This should only include
  // non-code blocks.
  size_t num_non_code_blocks = 0;
  for (size_t i = 0; i < image_layout_.sections.size(); ++i) {
    for (size_t k = 0; k < order.sections[i].blocks.size(); ++k) {
      const BlockGraph::Block* block = order.sections[i].blocks[k].block;
      ASSERT_TRUE(block != NULL);
      ASSERT_NE(BlockGraph::CODE_BLOCK, block->type());
      ++num_non_code_blocks;
    }
  }

  // Validate that we have the expected numbers of blocks.
  EXPECT_EQ(num_non_code_blocks_, num_non_code_blocks);
  EXPECT_EQ(num_decomposable_blocks_ + num_non_decomposable_blocks_,
            order.sections.back().blocks.size());
  for (size_t i = 0; i < order.sections.back().blocks.size(); ++i) {
    EXPECT_TRUE(order.sections.back().blocks[i].basic_block_offsets.empty());
  }
}

TEST_F(BasicBlockOptimizerTest, HotCold) {
  // This test does a simple manipulation of the entry counts for DllMain and
  // validates that some minimum number of its blocks get moved into the cold
  // section. We defer to the BasicBlockOrdererTest instances above for the
  // detailed Hot/Cold and Path Straightening tests.
  const BlockGraph::Block* dllmain = NULL;
  BlockGraph::AddressSpace::Range range;
  ASSERT_TRUE(FindBlockByName("DllMain", &dllmain, &range));
  ASSERT_TRUE(dllmain != NULL);

  using block_graph::BasicBlockSubGraph;
  using block_graph::BasicBlockDecomposer;

  BasicBlockSubGraph subgraph;
  BasicBlockDecomposer decomposer(dllmain, &subgraph);
  ASSERT_TRUE(decomposer.Decompose());
  ASSERT_EQ(1U, subgraph.block_descriptions().size());

  // Generate an entry count map with a non-zero count for every other BB.
  IndexedFrequencyInformation entry_counts;
  entry_counts.num_entries = 0;
  entry_counts.num_columns = 1;
  entry_counts.data_type = ::common::IndexedFrequencyData::BASIC_BLOCK_ENTRY;
  entry_counts.frequency_size = 4;
  IndexedFrequencyMap& frequency_map = entry_counts.frequency_map;

  const BasicBlockSubGraph::BlockDescription& desc =
      subgraph.block_descriptions().front();
  BasicBlockSubGraph::BasicBlockOrdering::const_iterator it(
      desc.basic_block_order.begin());
  size_t num_basic_blocks = desc.basic_block_order.size();
  size_t num_hot_blocks = 0;

  bool is_hot = true;
  BlockGraph::RelativeAddress start_offs = subgraph.original_block()->addr();
  for (; it != desc.basic_block_order.end(); ++it) {
    if (is_hot && BasicCodeBlock::Cast(*it) != NULL) {
      frequency_map[std::make_pair(start_offs + (*it)->offset(), 0)] = 1;
      ++num_hot_blocks;
    }

    // Toggle hotness for the next block.
    is_hot = !is_hot;
  }

  // Create an ordering that moves dllmain to a new section.
  std::string section_name(".dllmain");
  Order order;
  order.sections.resize(1);
  order.sections[0].id = Order::SectionSpec::kNewSectionId;
  order.sections[0].name = section_name;
  order.sections[0].characteristics = pe::kCodeCharacteristics;
  order.sections[0].blocks.push_back(Order::BlockSpec(dllmain));

  ASSERT_TRUE(
      optimizer_.Optimize(image_layout_, entry_counts, &order));

  ASSERT_EQ(image_layout_.sections.size() + 2, order.sections.size());
  ASSERT_EQ(section_name, order.sections[0].name);
  ASSERT_EQ(1U, order.sections[0].blocks.size());
  ASSERT_TRUE(!order.sections.back().blocks.empty());
  ASSERT_EQ(dllmain, order.sections[0].blocks[0].block);
  ASSERT_EQ(dllmain, order.sections.back().blocks[0].block);
  ASSERT_LE(num_hot_blocks,
            order.sections[0].blocks[0].basic_block_offsets.size());

  // Since data BBs that are referred to by 'hot' code BBs also make it into
  // the hot BB list, there could be fewer cold blocks than expected.
  ASSERT_GE(num_basic_blocks - num_hot_blocks,
            order.sections.back().blocks[0].basic_block_offsets.size());
}

}  // namespace reorder