1 : // Copyright 2012 Google Inc. All Rights Reserved.
2 : //
3 : // Licensed under the Apache License, Version 2.0 (the "License");
4 : // you may not use this file except in compliance with the License.
5 : // You may obtain a copy of the License at
6 : //
7 : // http://www.apache.org/licenses/LICENSE-2.0
8 : //
9 : // Unless required by applicable law or agreed to in writing, software
10 : // distributed under the License is distributed on an "AS IS" BASIS,
11 : // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : // See the License for the specific language governing permissions and
13 : // limitations under the License.
14 :
15 : #include "syzygy/block_graph/block_graph.h"
16 :
17 : #include <limits>
18 :
19 : #include "base/logging.h"
20 : #include "base/stringprintf.h"
21 :
22 : namespace block_graph {
23 :
24 : namespace {
25 :
26 : COMPILE_ASSERT(BlockGraph::BLOCK_ATTRIBUTES_MAX_BIT < 32,
27 : too_many_block_attributes);
28 :
29 : // A list of printable names corresponding to block types. This needs to
30 : // be kept in sync with the BlockGraph::BlockType enum!
31 : const char* kBlockType[] = {
32 : "CODE_BLOCK", "DATA_BLOCK",
33 : };
34 : COMPILE_ASSERT(arraysize(kBlockType) == BlockGraph::BLOCK_TYPE_MAX,
35 : kBlockType_not_in_sync);
36 :
37 : // Shift all items in an offset -> item map by @p distance, provided the
38 : // initial item offset was >= @p offset.
39 : template<typename ItemType>
40 : void ShiftOffsetItemMap(BlockGraph::Offset offset,
41 : BlockGraph::Offset distance,
42 E : std::map<BlockGraph::Offset, ItemType>* items) {
43 E : DCHECK_GE(offset, 0);
44 E : DCHECK_NE(distance, 0);
45 E : DCHECK(items != NULL);
46 :
47 : typedef std::map<BlockGraph::Offset, ItemType> ItemMap;
48 :
49 : // Get iterators to all of the items that need changing.
50 E : std::vector<ItemMap::iterator> item_its;
51 E : ItemMap::iterator item_it = items->lower_bound(offset);
52 E : while (item_it != items->end()) {
53 E : item_its.push_back(item_it);
54 E : ++item_it;
55 E : }
56 :
57 :   // Get the direction and bounds of the iteration. We need to walk through
58 :   // the iterators in a different order depending on whether we're shifting
59 :   // left or right. This ensures that earlier shifts don't land on the values
60 :   // of later, not-yet-shifted offsets.
61 E : int start = 0;
62 E : int stop = item_its.size();
63 E : int step = 1;
64 E : if (distance > 0) {
65 E : start = stop - 1;
66 E : stop = -1;
67 E : step = -1;
68 : }
69 :
70 E : for (int i = start; i != stop; i += step) {
71 E : item_it = item_its[i];
72 : items->insert(std::make_pair(item_it->first + distance,
73 E : item_it->second));
74 E : items->erase(item_it);
75 E : }
76 E : }
77 :
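  : // Shift all of @p block's references that start at or beyond @p offset by
  : // @p distance. The destination offsets of self-referential references are
  : // adjusted as well.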
78 : void ShiftReferences(BlockGraph::Block* block,
79 : BlockGraph::Offset offset,
80 E : BlockGraph::Offset distance) {
81 : // Make a copy of the reference map for simplicity.
82 E : BlockGraph::Block::ReferenceMap references = block->references();
83 :
84 : // Start by removing all references that have moved.
85 : BlockGraph::Block::ReferenceMap::const_iterator it =
86 E : references.lower_bound(offset);
87 E : for (; it != references.end(); ++it) {
88 E : if (it->first >= offset)
89 E : block->RemoveReference(it->first);
90 E : }
91 :
92 : // Then patch up all existing references.
93 E : it = references.begin();
94 E : for (; it != references.end(); ++it) {
95 E : BlockGraph::Reference ref(it->second);
96 E : BlockGraph::Offset new_offset(it->first);
97 :
98 : // If this is self-referential, fix the destination offset.
99 E : if (ref.referenced() == block && ref.offset() >= offset) {
100 : ref = BlockGraph::Reference(ref.type(),
101 : ref.size(),
102 : ref.referenced(),
103 : ref.offset() + distance,
104 E : ref.base() + distance);
105 : }
106 :
107 : // If its offset is past the change point, fix that.
108 E : if (it->first >= offset)
109 E : new_offset += distance;
110 :
111 : // In many cases this'll be a noop.
112 : // TODO(siggi): Optimize this.
113 E : block->SetReference(new_offset, ref);
114 E : }
115 E : }
116 :
117 : // Shift all referrers beyond @p offset by @p distance.
118 : void ShiftReferrers(BlockGraph::Block* self,
119 : BlockGraph::Offset offset,
120 : BlockGraph::Offset distance,
121 E : BlockGraph::Block::ReferrerSet* referrers) {
122 E : DCHECK_GE(offset, 0);
123 E : DCHECK_NE(distance, 0);
124 E : DCHECK(referrers != NULL);
125 :
126 : typedef BlockGraph::Block::ReferrerSet ReferrerSet;
127 : typedef BlockGraph::Reference Reference;
128 :
129 E : ReferrerSet::iterator ref_it = referrers->begin();
130 E : while (ref_it != referrers->end()) {
131 : // We need to keep around the next iterator as 'ref_it' will be invalidated
132 : // if we need to update the reference. (It will be deleted and then
133 : // recreated.)
134 E : ReferrerSet::iterator next_ref_it = ref_it;
135 E : ++next_ref_it;
136 :
137 E : BlockGraph::Block* ref_block = ref_it->first;
138 : // Our own references will have been moved already.
139 E : if (ref_block != self) {
140 E : BlockGraph::Offset ref_offset = ref_it->second;
141 :
142 E : Reference ref;
143 E : bool ref_found = ref_block->GetReference(ref_offset, &ref);
144 E : DCHECK(ref_found);
145 :
146 : // Shift the reference if need be.
147 E : if (ref.offset() >= offset) {
148 : Reference new_ref(ref.type(),
149 : ref.size(),
150 : ref.referenced(),
151 : ref.offset() + distance,
152 E : ref.base() + distance);
153 E : bool inserted = ref_block->SetReference(ref_offset, new_ref);
154 E : DCHECK(!inserted);
155 : }
156 : }
157 :
158 E : ref_it = next_ref_it;
159 E : }
160 E : }
161 :
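  : // Returns a printable name for the given block attribute, or NULL if the
  : // attribute is unknown.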
162 i : const char* BlockAttributeToString(BlockGraph::BlockAttributeEnum attr) {
163 i : switch (attr) {
164 : #define DEFINE_CASE(name) case BlockGraph::name: return #name;
165 i : BLOCK_ATTRIBUTE_ENUM(DEFINE_CASE)
166 : #undef DEFINE_CASE
167 : default:
168 i : NOTREACHED();
169 i : return NULL;
170 : }
171 i : }
172 :
173 : } // namespace
174 :
175 i : std::string BlockGraph::BlockAttributesToString(BlockAttributes attrs) {
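  :   // Walk the attribute bits from lowest to highest, appending the name of
  :   // each set bit and separating the names with '|'.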
176 i : BlockAttributes attr = 1;
177 i : std::string s;
178 i : for (; attr < BLOCK_ATTRIBUTES_MAX; attr <<= 1) {
179 i : if (attr & attrs) {
180 i : if (!s.empty())
181 i : s.append("|");
182 i : s.append(BlockAttributeToString(static_cast<BlockAttributeEnum>(attr)));
183 : }
184 i : }
185 i : return s;
186 i : }
187 :
188 E : const char* BlockGraph::BlockTypeToString(BlockGraph::BlockType type) {
189 E : DCHECK_LE(BlockGraph::CODE_BLOCK, type);
190 E : DCHECK_GT(BlockGraph::BLOCK_TYPE_MAX, type);
191 E : return kBlockType[type];
192 E : }
193 :
194 : std::string BlockGraph::LabelAttributesToString(
195 E : BlockGraph::LabelAttributes label_attributes) {
196 : static const char* kLabelAttributes[] = {
197 : "Code", "DebugStart", "DebugEnd", "ScopeStart", "ScopeEnd",
198 : "CallSite", "JumpTable", "CaseTable", "Data", "PublicSymbol" };
199 : COMPILE_ASSERT((1 << arraysize(kLabelAttributes)) == LABEL_ATTRIBUTES_MAX,
200 : label_attribute_names_not_in_sync_with_enum);
201 :
202 E : std::string s;
203 E : for (size_t i = 0; i < arraysize(kLabelAttributes); ++i) {
204 E : if (label_attributes & (1 << i)) {
205 E : if (!s.empty())
206 E : s.append("|");
207 E : s.append(kLabelAttributes[i]);
208 : }
209 E : }
210 E : return s;
211 E : }
212 :
213 : const BlockGraph::SectionId BlockGraph::kInvalidSectionId = -1;
214 :
215 : BlockGraph::BlockGraph()
216 : : next_section_id_(0),
217 E : next_block_id_(0) {
218 E : }
219 :
220 E : BlockGraph::~BlockGraph() {
221 E : }
222 :
223 : BlockGraph::Section* BlockGraph::AddSection(const base::StringPiece& name,
224 E : uint32 characteristics) {
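  :   // Section ids are allocated sequentially, so the insertion below cannot
  :   // collide with an existing entry.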
225 E : Section new_section(next_section_id_++, name, characteristics);
226 : std::pair<SectionMap::iterator, bool> result = sections_.insert(
227 E : std::make_pair(new_section.id(), new_section));
228 E : DCHECK(result.second);
229 :
230 E : return &result.first->second;
231 E : }
232 :
233 E : BlockGraph::Section* BlockGraph::FindSection(const base::StringPiece& name) {
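  :   // Forward to the const implementation and cast away the constness of the
  :   // result.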
234 E : const BlockGraph* self = const_cast<const BlockGraph*>(this);
235 E : const BlockGraph::Section* section = self->FindSection(name);
236 E : return const_cast<BlockGraph::Section*>(section);
237 E : }
238 :
239 : const BlockGraph::Section* BlockGraph::FindSection(
240 E : const base::StringPiece& name) const {
241 : // This is a linear scan, but thankfully images generally do not have many
242 : // sections and we do not create them very often. Fast lookup by index is
243 : // more important. If this ever becomes an issue, we could keep around a
244 : // second index by name.
245 E : SectionMap::const_iterator it = sections_.begin();
246 E : for (; it != sections_.end(); ++it) {
247 E : if (it->second.name() == name)
248 E : return &it->second;
249 E : }
250 :
251 E : return NULL;
252 E : }
253 :
254 : BlockGraph::Section* BlockGraph::FindOrAddSection(const base::StringPiece& name,
255 E : uint32 characteristics) {
256 E : Section* section = FindSection(name);
257 E : if (section) {
258 E : section->set_characteristic(characteristics);
259 E : return section;
260 : }
261 E : return AddSection(name, characteristics);
262 E : }
263 :
264 E : bool BlockGraph::RemoveSection(Section* section) {
265 E : DCHECK(section != NULL);
266 :
267 E : SectionMap::iterator it(sections_.find(section->id()));
268 E : if (it == sections_.end() || &it->second != section)
269 i : return false;
270 :
271 E : sections_.erase(it);
272 E : return true;
273 E : }
274 :
275 E : bool BlockGraph::RemoveSectionById(SectionId id) {
276 E : SectionMap::iterator it(sections_.find(id));
277 E : if (it == sections_.end())
278 E : return false;
279 :
280 E : sections_.erase(it);
281 E : return true;
282 E : }
283 :
284 : BlockGraph::Block* BlockGraph::AddBlock(BlockType type,
285 : Size size,
286 E : const base::StringPiece& name) {
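  :   // Block ids are allocated sequentially starting at 1, so the insertion
  :   // below always succeeds.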
287 E : BlockId id = ++next_block_id_;
288 : BlockMap::iterator it = blocks_.insert(
289 E : std::make_pair(id, Block(id, type, size, name, this))).first;
290 :
291 E : return &it->second;
292 E : }
293 :
294 E : bool BlockGraph::RemoveBlock(Block* block) {
295 E : DCHECK(block != NULL);
296 :
297 E : BlockMap::iterator it(blocks_.find(block->id()));
298 E : if (it == blocks_.end() || &it->second != block)
299 E : return false;
300 :
301 E : return RemoveBlockByIterator(it);
302 E : }
303 :
304 E : bool BlockGraph::RemoveBlockById(BlockId id) {
305 E : BlockMap::iterator it(blocks_.find(id));
306 E : if (it == blocks_.end())
307 E : return false;
308 :
309 E : return RemoveBlockByIterator(it);
310 E : }
311 :
312 E : BlockGraph::Section* BlockGraph::GetSectionById(SectionId id) {
313 E : SectionMap::iterator it(sections_.find(id));
314 :
315 E : if (it == sections_.end())
316 E : return NULL;
317 :
318 E : return &it->second;
319 E : }
320 :
321 E : const BlockGraph::Section* BlockGraph::GetSectionById(SectionId id) const {
322 E : SectionMap::const_iterator it(sections_.find(id));
323 :
324 E : if (it == sections_.end())
325 E : return NULL;
326 :
327 E : return &it->second;
328 E : }
329 :
330 E : BlockGraph::Block* BlockGraph::GetBlockById(BlockId id) {
331 E : BlockMap::iterator it(blocks_.find(id));
332 :
333 E : if (it == blocks_.end())
334 E : return NULL;
335 :
336 E : return &it->second;
337 E : }
338 :
339 : const BlockGraph::Block* BlockGraph::GetBlockById(BlockId id) const {
340 : BlockMap::const_iterator it(blocks_.find(id));
341 :
342 : if (it == blocks_.end())
343 : return NULL;
344 :
345 : return &it->second;
346 : }
347 :
348 E : bool BlockGraph::RemoveBlockByIterator(BlockMap::iterator it) {
349 E : DCHECK(it != blocks_.end());
350 :
351 : // Verify this block is fully disconnected.
352 E : if (it->second.referrers().size() > 0 || it->second.references().size() > 0)
353 E : return false;
354 :
355 E : blocks_.erase(it);
356 :
357 E : return true;
358 E : }
359 :
360 : BlockGraph::AddressSpace::AddressSpace(BlockGraph* graph)
361 E : : graph_(graph) {
362 E : DCHECK(graph != NULL);
363 E : }
364 :
365 : BlockGraph::Block* BlockGraph::AddressSpace::AddBlock(
366 : BlockType type, RelativeAddress addr, Size size,
367 E : const base::StringPiece& name) {
368 :
369 E : if (size > 0) {
370 : // First check to see that the range is clear.
371 E : AddressSpaceImpl::Range range(addr, size);
372 : AddressSpaceImpl::RangeMap::iterator it =
373 E : address_space_.FindFirstIntersection(range);
374 E : if (it != address_space_.ranges().end())
375 E : return NULL;
376 : }
377 :
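  :   // The range is clear (or the block is empty), so create the block and
  :   // insert it into the address space at @p addr.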
378 E : BlockGraph::Block* block = graph_->AddBlock(type, size, name);
379 E : DCHECK(block != NULL);
380 E : bool inserted = InsertImpl(addr, block);
381 E : DCHECK(inserted);
382 :
383 E : return block;
384 E : }
385 :
386 E : bool BlockGraph::AddressSpace::ResizeBlock(Block* block, size_t size) {
387 E : DCHECK_NE(reinterpret_cast<Block*>(NULL), block);
388 :
389 : // Determine if the block is in the address space. If so, this also finds its
390 : // address.
391 E : BlockAddressMap::iterator block_it = block_addresses_.find(block);
392 E : if (block_it == block_addresses_.end())
393 E : return false;
394 :
395 : // If the size isn't changing then we can return right away.
396 E : if (block->size() == size)
397 E : return true;
398 :
399 : // If the original size is non-zero we expect it to be in the address-space.
400 E : AddressSpaceImpl::RangeMap::iterator range_it;
401 E : if (block->size() > 0) {
402 :     // Find the iterator associated with this block's range.
403 E : AddressSpaceImpl::Range range(block_it->second, 1);
404 E : range_it = address_space_.FindContaining(range);
405 E : CHECK(range_it != address_space_.end());
406 :
407 : // We expect the sizes to agree.
408 E : CHECK_EQ(block->size(), range_it->first.size());
409 :
410 :     // If the size is growing, make sure we can grow it.
411 E : if (range_it->first.size() < size) {
412 : // Get the next block.
413 E : AddressSpaceImpl::RangeMap::iterator range_next = range_it;
414 E : ++range_next;
415 :
416 :     // If the current block, extended to the new size, would conflict with
417 :     // the next block then we can't do the resize.
418 : if (range_next != address_space_.end() &&
419 E : range_it->first.start() + size > range_next->first.start()) {
420 E : return false;
421 : }
422 : }
423 :
424 : // Remove the existing block from the address-space.
425 E : address_space_.Remove(range_it);
426 : }
427 :
428 :   // Reinsert the block with its new range if it's not empty. If the block is
429 :   // empty it won't be added back to the address space.
430 E : if (size > 0)
431 E : address_space_.Insert(Range(block_it->second, size), block, &range_it);
432 :
433 : // Update the block with its new size.
434 E : block->set_size(size);
435 :
436 E : return true;
437 E : }
438 :
439 E : bool BlockGraph::AddressSpace::InsertBlock(RelativeAddress addr, Block* block) {
440 E : return InsertImpl(addr, block);
441 E : }
442 :
443 : BlockGraph::Block* BlockGraph::AddressSpace::GetBlockByAddress(
444 E : RelativeAddress addr) const {
445 E : return GetContainingBlock(addr, 1);
446 E : }
447 :
448 : BlockGraph::Block* BlockGraph::AddressSpace::GetContainingBlock(
449 E : RelativeAddress addr, Size size) const {
450 E : AddressSpaceImpl::Range range(addr, size);
451 : AddressSpaceImpl::RangeMap::const_iterator it =
452 E : address_space_.FindContaining(range);
453 E : if (it == address_space_.ranges().end())
454 E : return NULL;
455 :
456 E : return it->second;
457 E : }
458 :
459 : BlockGraph::Block* BlockGraph::AddressSpace::GetFirstIntersectingBlock(
460 E : RelativeAddress addr, Size size) {
461 E : AddressSpaceImpl::Range range(addr, size);
462 : AddressSpaceImpl::RangeMap::iterator it =
463 E : address_space_.FindFirstIntersection(range);
464 E : if (it == address_space_.ranges().end())
465 E : return NULL;
466 :
467 E : return it->second;
468 E : }
469 :
470 : BlockGraph::AddressSpace::RangeMapConstIterPair
471 : BlockGraph::AddressSpace::GetIntersectingBlocks(RelativeAddress address,
472 E : Size size) const {
473 E : return address_space_.FindIntersecting(Range(address, size));
474 E : }
475 :
476 : BlockGraph::AddressSpace::RangeMapIterPair
477 : BlockGraph::AddressSpace::GetIntersectingBlocks(RelativeAddress address,
478 E : Size size) {
479 E : return address_space_.FindIntersecting(Range(address, size));
480 E : }
481 :
482 : bool BlockGraph::AddressSpace::GetAddressOf(const Block* block,
483 E : RelativeAddress* addr) const {
484 E : DCHECK(block != NULL);
485 E : DCHECK(addr != NULL);
486 :
487 E : BlockAddressMap::const_iterator it(block_addresses_.find(block));
488 E : if (it == block_addresses_.end())
489 E : return false;
490 :
491 E : *addr = it->second;
492 E : return true;
493 E : }
494 :
495 E : bool BlockGraph::AddressSpace::InsertImpl(RelativeAddress addr, Block* block) {
496 : // Only insert the block in the address-space if it has non-zero size.
497 E : if (block->size() > 0) {
498 E : Range range(addr, block->size());
499 E : bool inserted = address_space_.Insert(range, block);
500 E : if (!inserted)
501 E : return false;
502 : }
503 :
504 E : bool inserted = block_addresses_.insert(std::make_pair(block, addr)).second;
505 E : DCHECK(inserted);
506 : // Update the address stored in the block.
507 E : block->set_addr(addr);
508 :
509 E : return true;
510 E : }
511 :
512 E : bool BlockGraph::AddressSpace::ContainsBlock(const Block* block) {
513 E : DCHECK(block != NULL);
514 E : return block_addresses_.count(block) != 0;
515 E : }
516 :
517 : BlockGraph::Block* BlockGraph::AddressSpace::MergeIntersectingBlocks(
518 E : const Range& range) {
519 : typedef std::vector<std::pair<RelativeAddress, BlockGraph::Block*>>
520 : BlockAddressVector;
521 :
522 : // Find all the blocks that intersect the range, keep them and their
523 : // addresses. Start by finding the first intersection, then iterate
524 : // from there until we find a block that doesn't intersect with range.
525 : AddressSpaceImpl::RangeMap::iterator address_start =
526 E : address_space_.FindFirstIntersection(range);
527 E : AddressSpaceImpl::RangeMap::iterator address_it(address_start);
528 :
529 E : BlockAddressVector intersecting;
530 : for (; address_it != address_space_.ranges().end() &&
531 E : address_it->first.Intersects(range); ++address_it) {
532 : intersecting.push_back(std::make_pair(address_it->first.start(),
533 E : address_it->second));
534 E : }
535 :
536 :   // Bail if there are no intersecting blocks.
537 E : if (intersecting.empty())
538 i : return NULL;
539 :
540 : // In case of single-block intersection, we're done.
541 E : if (intersecting.size() == 1)
542 i : return intersecting[0].second;
543 :
544 E : DCHECK(!intersecting.empty());
545 :
546 : // Calculate the start and end addresses of the new block.
547 E : BlockGraph::Block* first_block = intersecting[0].second;
548 E : BlockGraph::Block* last_block = intersecting[intersecting.size() - 1].second;
549 E : DCHECK(first_block != NULL && last_block != NULL);
550 :
551 E : RelativeAddress begin = std::min(range.start(), intersecting[0].first);
552 : RelativeAddress end = std::max(range.start() + range.size(),
553 E : intersecting[intersecting.size() - 1].first + last_block->size());
554 :
555 E : DCHECK(begin <= range.start());
556 E : DCHECK(end >= range.start() + range.size());
557 :
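  :   // The merged block inherits its name, type, section and alignment from the
  :   // first intersecting block.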
558 E : base::StringPiece block_name = first_block->name();
559 E : BlockType block_type = first_block->type();
560 E : size_t section_id = first_block->section();
561 E : size_t alignment = first_block->alignment();
562 E : BlockAttributes attributes = 0;
563 :
564 : // Some attributes are only propagated if they are present on *all* blocks
565 : // in the range.
566 : static const BlockAttributes kUniformAttributes =
567 : GAP_BLOCK | PADDING_BLOCK | BUILT_BY_SYZYGY;
568 E : BlockAttributes uniform_attributes = kUniformAttributes;
569 :
570 E : BlockGraph::Block::SourceRanges source_ranges;
571 :
572 : // Remove the found blocks from the address space, and make sure they're all
573 : // of the same type and from the same section as the first block. Merge the
574 : // data from all the blocks as we go along, as well as the attributes and
575 : // source ranges.
576 E : std::vector<uint8> merged_data(end - begin);
577 E : bool have_data = false;
578 E : for (size_t i = 0; i < intersecting.size(); ++i) {
579 E : RelativeAddress addr = intersecting[i].first;
580 E : BlockGraph::Block* block = intersecting[i].second;
581 E : DCHECK_EQ(block_type, block->type());
582 E : DCHECK_EQ(section_id, block->section());
583 :
584 E : if (block->data() != NULL) {
585 E : have_data = true;
586 E : memcpy(&merged_data.at(addr - begin), block->data(), block->data_size());
587 : }
588 :
589 : // Add any non-uniform attributes to the block, and keep track of the
590 : // uniform attributes.
591 E : attributes |= block->attributes() & ~kUniformAttributes;
592 E : uniform_attributes &= block->attributes();
593 :
594 : // Merge in the source ranges from each block.
595 E : BlockGraph::Offset block_offset = addr - begin;
596 : BlockGraph::Block::SourceRanges::RangePairs::const_iterator src_it =
597 E : block->source_ranges().range_pairs().begin();
598 E : for (; src_it != block->source_ranges().range_pairs().end(); ++src_it) {
599 :       // The data range is relative to the containing block, so we have to
600 :       // translate each individual block's offset to an offset in the merged block.
601 E : BlockGraph::Offset merged_offset = block_offset + src_it->first.start();
602 : bool pushed = source_ranges.Push(
603 : BlockGraph::Block::DataRange(merged_offset, src_it->first.size()),
604 E : src_it->second);
605 E : DCHECK(pushed);
606 E : }
607 :
608 E : bool removed = address_space_.Remove(Range(addr, block->size()));
609 E : DCHECK(removed);
610 E : size_t num_removed = block_addresses_.erase(intersecting[i].second);
611 E : DCHECK_EQ(1U, num_removed);
612 E : }
613 :
614 : // Create the new block.
615 : BlockGraph::Block* new_block = AddBlock(block_type,
616 : begin, end - begin,
617 E : block_name);
618 E : DCHECK(new_block != NULL);
619 :
620 : // Set the rest of the properties for the new block.
621 E : new_block->source_ranges() = source_ranges;
622 E : new_block->set_section(section_id);
623 E : new_block->set_alignment(alignment);
624 E : new_block->set_attributes(attributes | uniform_attributes);
625 E : if (have_data) {
626 E : uint8* data = new_block->CopyData(merged_data.size(), &merged_data.at(0));
627 E : if (data == NULL) {
628 i : LOG(ERROR) << "Unable to copy merged data";
629 i : return NULL;
630 : }
631 : }
632 :
633 : // Now move all labels and references to the new block.
634 E : for (size_t i = 0; i < intersecting.size(); ++i) {
635 E : RelativeAddress addr = intersecting[i].first;
636 E : BlockGraph::Block* block = intersecting[i].second;
637 E : BlockGraph::Offset start_offset = addr - begin;
638 :
639 : // If the destination block is not a code block, preserve the old block
640 : // names as labels for debugging. We also need to make sure the label is
641 : // not empty, as that is verboten.
642 E : if (block_type != BlockGraph::CODE_BLOCK && !block->name().empty()) {
643 : new_block->SetLabel(start_offset,
644 : block->name(),
645 E : BlockGraph::DATA_LABEL);
646 : }
647 :
648 : // Move labels.
649 : BlockGraph::Block::LabelMap::const_iterator
650 E : label_it(block->labels().begin());
651 E : for (; label_it != block->labels().end(); ++label_it) {
652 : new_block->SetLabel(start_offset + label_it->first,
653 E : label_it->second);
654 E : }
655 :
656 : // Copy the reference map since we mutate the original.
657 E : BlockGraph::Block::ReferenceMap refs(block->references());
658 E : BlockGraph::Block::ReferenceMap::const_iterator ref_it(refs.begin());
659 E : for (; ref_it != refs.end(); ++ref_it) {
660 E : block->RemoveReference(ref_it->first);
661 E : new_block->SetReference(start_offset + ref_it->first, ref_it->second);
662 E : }
663 :
664 : // Redirect all referrers to the new block.
665 : block->TransferReferrers(start_offset,
666 : new_block,
667 E : BlockGraph::Block::kTransferInternalReferences);
668 :
669 : // Check that we've removed all references and
670 : // referrers from the original block.
671 E : DCHECK(block->references().empty());
672 E : DCHECK(block->referrers().empty());
673 :
674 : // Remove the original block.
675 E : bool removed = graph_->RemoveBlock(block);
676 E : DCHECK(removed);
677 E : }
678 :
679 E : return new_block;
680 E : }
681 :
682 E : bool BlockGraph::Section::set_name(const base::StringPiece& name) {
683 E : if (name == NULL)
684 i : return false;
685 :
686 E : if (name.empty())
687 i : return false;
688 :
689 E : name.CopyToString(&name_);
690 E : return true;
691 E : }
692 :
693 E : bool BlockGraph::Section::Save(core::OutArchive* out_archive) const {
694 E : DCHECK(out_archive != NULL);
695 : return out_archive->Save(id_) && out_archive->Save(name_) &&
696 E : out_archive->Save(characteristics_);
697 E : }
698 :
699 E : bool BlockGraph::Section::Load(core::InArchive* in_archive) {
700 E : DCHECK(in_archive != NULL);
701 : return in_archive->Load(&id_) && in_archive->Load(&name_) &&
702 E : in_archive->Load(&characteristics_);
703 E : }
704 :
705 E : std::string BlockGraph::Label::ToString() const {
706 : return base::StringPrintf("%s (%s)",
707 : name_.c_str(),
708 E : LabelAttributesToString(attributes_).c_str());
709 E : }
710 :
711 E : bool BlockGraph::Label::IsValid() const {
712 E : return AreValidAttributes(attributes_);
713 E : }
714 :
715 E : bool BlockGraph::Label::AreValidAttributes(LabelAttributes attributes) {
716 : // A label needs to have at least one attribute.
717 E : if (attributes == 0)
718 E : return false;
719 :
720 : // TODO(chrisha): Once we make the switch to VS2010 determine where call
721 : // site labels may land. Are they at the beginning of the call
722 : // instruction (in which case they may coincide with *_START_LABEL,
723 : // *_END_LABEL and CODE_LABEL), or do they point at the address of the
724 : // call (in which case they must be completely on their own)? For now, we
725 : // simply ignore them entirely from consideration.
726 E : attributes &= ~CALL_SITE_LABEL;
727 :
728 : // Public symbols can coincide with anything, so we can basically ignore
729 : // them.
730 E : attributes &= ~PUBLIC_SYMBOL_LABEL;
731 :
732 :   // A code label can coincide with debug and scope labels. (It can coincide
733 : // with *_END_LABEL labels because of 1-byte instructions, like RET or INT.)
734 : const LabelAttributes kCodeDebugScopeLabels =
735 : CODE_LABEL | DEBUG_START_LABEL | DEBUG_END_LABEL | SCOPE_START_LABEL |
736 E : SCOPE_END_LABEL;
737 : if ((attributes & CODE_LABEL) != 0 &&
738 E : (attributes & ~kCodeDebugScopeLabels) != 0) {
739 E : return false;
740 : }
741 :
742 : // A jump table must be paired with a data label. It may also be paired
743 : // with a debug-end label if tail-call optimization has been applied by
744 : // the compiler/linker.
745 : const LabelAttributes kJumpDataLabelAttributes =
746 E : JUMP_TABLE_LABEL | DATA_LABEL;
747 E : if (attributes & JUMP_TABLE_LABEL) {
748 E : if ((attributes & kJumpDataLabelAttributes) != kJumpDataLabelAttributes)
749 E : return false;
750 : // Filter out the debug-end label if present and check that nothing else
751 : // is set.
752 E : attributes &= ~DEBUG_END_LABEL;
753 E : if ((attributes & ~kJumpDataLabelAttributes) != 0)
754 i : return false;
755 E : return true;
756 : }
757 :
758 : // A case table must be paired with a data label and nothing else.
759 : const LabelAttributes kCaseDataLabelAttributes =
760 E : CASE_TABLE_LABEL | DATA_LABEL;
761 E : if (attributes & CASE_TABLE_LABEL) {
762 E : if ((attributes & kCaseDataLabelAttributes) != kCaseDataLabelAttributes)
763 E : return false;
764 E : if ((attributes & ~kCaseDataLabelAttributes) != 0)
765 i : return false;
766 E : return true;
767 : }
768 :
769 : // If there is no case or jump label, then a data label must be on its own.
770 E : if ((attributes & DATA_LABEL) != 0 && (attributes & ~DATA_LABEL) != 0)
771 i : return false;
772 :
773 E : return true;
774 E : }
775 :
776 : BlockGraph::Block::Block(BlockGraph* block_graph)
777 : : id_(0),
778 : type_(BlockGraph::CODE_BLOCK),
779 : size_(0),
780 : alignment_(1),
781 : name_(NULL),
782 : compiland_name_(NULL),
783 : addr_(RelativeAddress::kInvalidAddress),
784 : block_graph_(block_graph),
785 : section_(kInvalidSectionId),
786 : attributes_(0),
787 : owns_data_(false),
788 : data_(NULL),
789 E : data_size_(0) {
790 E : DCHECK(block_graph != NULL);
791 E : }
792 :
793 : BlockGraph::Block::Block(BlockId id,
794 : BlockType type,
795 : Size size,
796 : const base::StringPiece& name,
797 : BlockGraph* block_graph)
798 : : id_(id),
799 : type_(type),
800 : size_(size),
801 : alignment_(1),
802 : name_(NULL),
803 : compiland_name_(NULL),
804 : addr_(RelativeAddress::kInvalidAddress),
805 : block_graph_(block_graph),
806 : section_(kInvalidSectionId),
807 : attributes_(0),
808 : owns_data_(false),
809 : data_(NULL),
810 E : data_size_(0) {
811 E : DCHECK(block_graph != NULL);
812 E : set_name(name);
813 E : }
814 :
815 E : BlockGraph::Block::~Block() {
816 E : DCHECK(block_graph_ != NULL);
817 E : if (owns_data_)
818 E : delete [] data_;
819 E : }
820 :
821 E : void BlockGraph::Block::set_name(const base::StringPiece& name) {
822 E : DCHECK(block_graph_ != NULL);
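  :   // Block names are interned in the block-graph's string table so that
  :   // identical names share storage.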
823 : const std::string& interned_name =
824 E : block_graph_->string_table().InternString(name);
825 E : name_ = &interned_name;
826 E : }
827 :
828 E : const std::string& BlockGraph::Block::compiland_name() const {
829 E : DCHECK(block_graph_ != NULL);
830 E : if (compiland_name_ == NULL)
831 E : return block_graph_->string_table().InternString("");
832 E : return *compiland_name_;
833 E : }
834 :
835 E : void BlockGraph::Block::set_compiland_name(const base::StringPiece& name) {
836 E : DCHECK(block_graph_ != NULL);
837 : const std::string& interned_name =
838 E : block_graph_->string_table().InternString(name);
839 E : compiland_name_ = &interned_name;
840 E : }
841 :
842 E : uint8* BlockGraph::Block::AllocateRawData(size_t data_size) {
843 E : DCHECK_GT(data_size, 0u);
844 E : DCHECK_LE(data_size, size_);
845 :
846 E : uint8* new_data = new uint8[data_size];
847 E : if (!new_data)
848 i : return NULL;
849 :
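  :   // If this block already owns a buffer, release it before taking ownership
  :   // of the new allocation.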
850 E : if (owns_data()) {
851 i : DCHECK(data_ != NULL);
852 i : delete [] data_;
853 : }
854 :
855 E : data_ = new_data;
856 E : data_size_ = data_size;
857 E : owns_data_ = true;
858 :
859 E : return new_data;
860 E : }
861 :
862 : void BlockGraph::Block::InsertData(Offset offset,
863 : Size size,
864 E : bool always_allocate_data) {
865 E : DCHECK_GE(offset, 0);
866 E : DCHECK_LE(offset, static_cast<Offset>(size_));
867 :
868 E : if (size > 0) {
869 : // Patch up the block.
870 E : size_ += size;
871 E : ShiftOffsetItemMap(offset, size, &labels_);
872 E : ShiftReferences(this, offset, size);
873 E : ShiftReferrers(this, offset, size, &referrers_);
874 E : source_ranges_.InsertUnmappedRange(DataRange(offset, size));
875 :
876 : // Does this affect already allocated data?
877 E : if (static_cast<Size>(offset) < data_size_) {
878 : // Reallocate, shift the old data to the end, and zero out the new data.
879 E : size_t bytes_to_shift = data_size_ - offset;
880 E : ResizeData(data_size_ + size);
881 E : uint8* new_data = GetMutableData();
882 E : memmove(new_data + offset + size, new_data + offset, bytes_to_shift);
883 E : memset(new_data + offset, 0, size);
884 : }
885 : }
886 :
887 : // If we've been asked to, at least make sure that the data is allocated.
888 E : if (always_allocate_data && data_size_ < offset + size)
889 E : ResizeData(offset + size);
890 :
891 : return;
892 E : }
893 :
894 E : bool BlockGraph::Block::RemoveData(Offset offset, Size size) {
895 E : DCHECK_GE(offset, 0);
896 E : DCHECK_LE(offset, static_cast<Offset>(size_));
897 :
898 E : if (size == 0)
899 i : return true;
900 :
901 : // Ensure there are no labels in this range.
902 E : if (labels_.lower_bound(offset) != labels_.lower_bound(offset + size))
903 E : return false;
904 :
905 : // Ensure that there are no references intersecting this range.
906 E : ReferenceMap::const_iterator refc_it = references_.begin();
907 E : for (; refc_it != references_.end(); ++refc_it) {
908 E : if (refc_it->first >= static_cast<Offset>(offset + size))
909 E : break;
910 E : if (static_cast<Offset>(refc_it->first + refc_it->second.size()) > offset)
911 E : return false;
912 E : }
913 :
914 : // Ensure there are no referrers pointing to the data we want to remove.
915 E : ReferrerSet::const_iterator refr_it = referrers_.begin();
916 E : for (; refr_it != referrers_.end(); ++refr_it) {
917 E : Reference ref;
918 E : if (!refr_it->first->GetReference(refr_it->second, &ref)) {
919 i : LOG(ERROR) << "Unable to get reference from referrer.";
920 i : return false;
921 : }
922 : if (ref.offset() < static_cast<Offset>(offset + size) &&
923 E : static_cast<Offset>(ref.offset() + ref.size()) > offset) {
924 E : return false;
925 : }
926 E : }
927 :
928 : // Patch up the block.
929 E : size_ -= size;
930 E : ShiftOffsetItemMap(offset + size, -static_cast<int>(size), &labels_);
931 E : ShiftReferences(this, offset + size, -static_cast<int>(size));
932 E : ShiftReferrers(this, offset + size, -static_cast<int>(size), &referrers_);
933 E : source_ranges_.RemoveMappedRange(DataRange(offset, size));
934 :
935 : // Does this affect already allocated data?
936 E : if (static_cast<Size>(offset) < data_size_) {
937 E : size_t new_data_size = data_size_ - size;
938 :     // Is there data beyond the range being removed?
939 E : if (static_cast<Size>(offset + size) < data_size_) {
940 : // Shift tail data to left.
941 E : uint8* data = GetMutableData();
942 E : size_t bytes_to_shift = data_size_ - offset - size;
943 E : size_t old_data_size = data_size_;
944 : memmove(data + new_data_size - bytes_to_shift,
945 : data + old_data_size - bytes_to_shift,
946 E : bytes_to_shift);
947 E : } else {
948 E : new_data_size = offset;
949 : }
950 E : ResizeData(new_data_size);
951 : }
952 :
953 E : return true;
954 E : }
955 :
956 : bool BlockGraph::Block::InsertOrRemoveData(Offset offset,
957 : Size current_size,
958 : Size new_size,
959 E : bool always_allocate_data) {
960 E : DCHECK_GE(offset, 0);
961 E : DCHECK_LE(offset, static_cast<Offset>(size_));
962 :
963 : // If we're growing use InsertData.
964 E : if (new_size > current_size) {
965 E : Offset insert_offset = offset + current_size;
966 E : Size insert_size = new_size - current_size;
967 E : InsertData(insert_offset, insert_size, always_allocate_data);
968 E : return true;
969 : }
970 :
971 : // If we're shrinking we'll need to use RemoveData.
972 E : if (new_size < current_size) {
973 E : Offset remove_offset = offset + new_size;
974 E : Size remove_size = current_size - new_size;
975 E : if (!RemoveData(remove_offset, remove_size))
976 i : return false;
977 : // We fall through so that 'always_allocate_data' can be respected.
978 : }
979 :
980 : // If we've been asked to, at least make sure that the data is allocated.
981 E : if (always_allocate_data && data_size_ < offset + new_size)
982 E : ResizeData(offset + new_size);
983 :
984 E : return true;
985 E : }
986 :
987 E : void BlockGraph::Block::SetData(const uint8* data, size_t data_size) {
988 : DCHECK((data_size == 0 && data == NULL) ||
989 E : (data_size != 0 && data != NULL));
990 E : DCHECK(data_size <= size_);
991 :
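  :   // Release any buffer this block owns; from here on the block points at,
  :   // but does not own, the caller's data.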
992 E : if (owns_data_)
993 E : delete [] data_;
994 :
995 E : owns_data_ = false;
996 E : data_ = data;
997 E : data_size_ = data_size;
998 E : }
999 :
1000 E : uint8* BlockGraph::Block::AllocateData(size_t size) {
1001 E : uint8* new_data = AllocateRawData(size);
1002 E : if (new_data == NULL)
1003 i : return NULL;
1004 :
1005 E : ::memset(new_data, 0, size);
1006 E : return new_data;
1007 E : }
1008 :
1009 E : uint8* BlockGraph::Block::CopyData(size_t size, const void* data) {
1010 E : uint8* new_data = AllocateRawData(size);
1011 E : if (new_data == NULL)
1012 i : return NULL;
1013 :
1014 E : memcpy(new_data, data, size);
1015 E : return new_data;
1016 E : }
1017 :
1018 E : const uint8* BlockGraph::Block::ResizeData(size_t new_size) {
1019 E : if (new_size == data_size_)
1020 E : return data_;
1021 :
1022 E : if (!owns_data() && new_size < data_size_) {
1023 : // Not in our ownership and shrinking. We only need to adjust our length.
1024 E : data_size_ = new_size;
1025 E : } else {
1026 : // Either our own data, or it's growing (or both).
1027 E : uint8* new_data = NULL;
1028 :
1029 : // If the new size is non-zero we need to reallocate.
1030 E : if (new_size > 0) {
1031 E : new_data = new uint8[new_size];
1032 E : CHECK(new_data);
1033 :
1034 : // Copy the (head of the) old data.
1035 E : memcpy(new_data, data_, std::min(data_size_, new_size));
1036 E : if (new_size > data_size_) {
1037 : // Zero the tail.
1038 E : memset(new_data + data_size_, 0, new_size - data_size_);
1039 : }
1040 : }
1041 :
1042 E : if (owns_data())
1043 E : delete [] data_;
1044 :
1045 E : owns_data_ = true;
1046 E : data_ = new_data;
1047 E : data_size_ = new_size;
1048 : }
1049 :
1050 E : return data_;
1051 E : }
1052 :
1053 E : uint8* BlockGraph::Block::GetMutableData() {
1054 E : DCHECK_NE(0U, data_size_);
1055 E : DCHECK(data_ != NULL);
1056 :
1057 : // Make a copy if we don't already own the data.
1058 E : if (!owns_data()) {
1059 E : uint8* new_data = new uint8[data_size_];
1060 E : if (new_data == NULL)
1061 i : return NULL;
1062 E : memcpy(new_data, data_, data_size_);
1063 E : data_ = new_data;
1064 E : owns_data_ = true;
1065 : }
1066 E : DCHECK(owns_data_);
1067 :
1068 E : return const_cast<uint8*>(data_);
1069 E : }
1070 :
1071 E : bool BlockGraph::Block::HasExternalReferrers() const {
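  :   // Any referrer other than this block itself counts as external.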
1072 E : ReferrerSet::const_iterator it = referrers().begin();
1073 E : for (; it != referrers().end(); ++it) {
1074 E : if (it->first != this)
1075 E : return true;
1076 E : }
1077 E : return false;
1078 E : }
1079 :
1080 E : bool BlockGraph::Block::SetReference(Offset offset, const Reference& ref) {
1081 E : DCHECK(ref.referenced() != NULL);
1082 :
1083 : // Non-code blocks can be referred to by pointers that lie outside of their
1084 : // extent (due to loop induction, arrays indexed with an implicit offset,
1085 :   // etc.). Code blocks cannot be referred to in this manner: references to
1086 :   // code blocks must point to places where the flow of execution actually lands.
1087 E : if (ref.referenced()->type() == CODE_BLOCK) {
1088 : DCHECK(ref.offset() >= 0 &&
1089 E : static_cast<size_t>(ref.offset()) <= ref.referenced()->size());
1090 E : DCHECK(offset + ref.size() <= size());
1091 : }
1092 :
1093 : #if defined(DEBUG) || !defined(NDEBUG)
1094 : {
1095 : // NOTE: It might be worthwhile making SetReference return true on success,
1096 : // and false on failure as it is possible for references to conflict.
1097 : // For now we simply check for conflicts in debug builds and die an
1098 : // unglorious death if we find any.
1099 :
1100 E : if (!ref.IsValid())
1101 i : NOTREACHED() << "Trying to insert invalid reference.";
1102 :
1103 : // Examine references before us that could possibly conflict with us.
1104 E : Offset offset_begin = offset - Reference::kMaximumSize + 1;
1105 : ReferenceMap::const_iterator it =
1106 E : references_.lower_bound(offset_begin);
1107 E : for (; it != references_.end() && it->first < offset; ++it) {
1108 E : if (static_cast<Offset>(it->first + it->second.size()) > offset)
1109 i : NOTREACHED() << "Trying to insert conflicting reference.";
1110 E : }
1111 :
1112 : // Skip the reference at the same offset if there is one. This reference
1113 : // will be replaced by the new one.
1114 E : if (it != references_.end() && it->first == offset)
1115 E : ++it;
1116 :
1117 : // This is the first reference after our offset. Check to see if it lands
1118 : // within the range we want to occupy.
1119 : if (it != references_.end() &&
1120 E : it->first < static_cast<Offset>(offset + ref.size())) {
1121 i : NOTREACHED() << "Trying to insert conflicting reference.";
1122 : }
1123 : }
1124 : #endif
1125 :
1126 : // Did we have an earlier reference at this location?
1127 E : ReferenceMap::iterator it(references_.find(offset));
1128 E : bool inserted = false;
1129 E : if (it != references_.end()) {
1130 : // Erase the back reference.
1131 E : BlockGraph::Block* referenced = it->second.referenced();
1132 E : Referrer referrer(this, offset);
1133 E : size_t removed = referenced->referrers_.erase(referrer);
1134 E : DCHECK_EQ(1U, removed);
1135 :
1136 : // Lastly switch the reference.
1137 E : it->second = ref;
1138 E : } else {
1139 : // It's a new reference, insert it.
1140 E : inserted = references_.insert(std::make_pair(offset, ref)).second;
1141 E : DCHECK(inserted);
1142 : }
1143 :
1144 : // Record the back-reference.
1145 E : ref.referenced()->referrers_.insert(std::make_pair(this, offset));
1146 :
1147 E : return inserted;
1148 E : }
1149 :
1150 : bool BlockGraph::Block::GetReference(Offset offset,
1151 E : Reference* reference) const {
1152 E : DCHECK(reference != NULL);
1153 E : ReferenceMap::const_iterator it(references_.find(offset));
1154 E : if (it == references_.end())
1155 E : return false;
1156 :
1157 E : *reference = it->second;
1158 E : return true;
1159 E : }
1160 :
1161 E : bool BlockGraph::Block::RemoveReference(Offset offset) {
1162 :   // Do we have a reference at this location?
1163 E : ReferenceMap::iterator it(references_.find(offset));
1164 E : if (it == references_.end())
1165 i : return false;
1166 :
1167 E : BlockGraph::Block* referenced = it->second.referenced();
1168 E : Referrer referrer(this, offset);
1169 E : size_t removed = referenced->referrers_.erase(referrer);
1170 E : DCHECK_EQ(1U, removed);
1171 E : references_.erase(it);
1172 :
1173 E : return true;
1174 E : }
1175 :
1176 E : bool BlockGraph::Block::RemoveAllReferences() {
1177 E : ReferenceMap::iterator it = references_.begin();
1178 E : while (it != references_.end()) {
1179 E : ReferenceMap::iterator to_remove = it;
1180 E : ++it;
1181 :
1182 : // TODO(rogerm): As an optimization, we don't need to drop intra-block
1183 : // references when disconnecting from the block_graph. Consider having
1184 : // BlockGraph::RemoveBlockByIterator() check that the block has no
1185 : // external referrers before calling this function and erasing the
1186 : // block.
1187 :
1188 : // Unregister this reference from the referred block then erase it.
1189 E : BlockGraph::Block* referenced = to_remove->second.referenced();
1190 E : Referrer referrer(this, to_remove->first);
1191 E : size_t removed = referenced->referrers_.erase(referrer);
1192 E : DCHECK_EQ(1U, removed);
1193 E : references_.erase(to_remove);
1194 E : }
1195 :
1196 E : return true;
1197 E : }
1198 :
1199 E : bool BlockGraph::Block::SetLabel(Offset offset, const Label& label) {
1200 E : DCHECK_LE(0, offset);
1201 E : DCHECK_LE(static_cast<size_t>(offset), size_);
1202 :
1203 E : VLOG(2) << name() << ": adding "
1204 : << LabelAttributesToString(label.attributes()) << " label '"
1205 : << label.name() << "' at offset " << offset << ".";
1206 :
1207 : // Try inserting the label into the label map.
1208 : std::pair<LabelMap::iterator, bool> result(
1209 E : labels_.insert(std::make_pair(offset, label)));
1210 :
1211 : // If it was freshly inserted then we're done.
1212 E : if (result.second)
1213 E : return true;
1214 :
1215 E : return false;
1216 E : }
1217 :
1218 E : bool BlockGraph::Block::GetLabel(Offset offset, Label* label) const {
1219 E : DCHECK(offset >= 0 && static_cast<size_t>(offset) <= size_);
1220 E : DCHECK(label != NULL);
1221 :
1222 E : LabelMap::const_iterator it = labels_.find(offset);
1223 E : if (it == labels_.end())
1224 E : return false;
1225 :
1226 E : *label = it->second;
1227 E : return true;
1228 E : }
1229 :
1230 E : bool BlockGraph::Block::RemoveLabel(Offset offset) {
1231 E : DCHECK(offset >= 0 && static_cast<size_t>(offset) <= size_);
1232 :
1233 E : return labels_.erase(offset) == 1;
1234 E : }
1235 :
1236 E : bool BlockGraph::Block::HasLabel(Offset offset) const {
1237 E : DCHECK(offset >= 0 && static_cast<size_t>(offset) <= size_);
1238 :
1239 E : return labels_.find(offset) != labels_.end();
1240 E : }
1241 :
1242 : bool BlockGraph::Block::TransferReferrers(Offset offset,
1243 E : Block* new_block, TransferReferrersFlags flags) {
1244 :   // Redirect all referrers to the new block. We copy the referrer set
1245 : // because it is otherwise mutated during iteration.
1246 E : BlockGraph::Block::ReferrerSet referrers = referrers_;
1247 E : BlockGraph::Block::ReferrerSet::const_iterator referrer_it(referrers.begin());
1248 :
1249 E : for (; referrer_it != referrers.end(); ++referrer_it) {
1250 : // Get the original reference.
1251 E : BlockGraph::Block::Referrer referrer = *referrer_it;
1252 : BlockGraph::Block::ReferenceMap::const_iterator found_ref(
1253 E : referrer.first->references().find(referrer.second));
1254 E : DCHECK(found_ref != referrer.first->references().end());
1255 E : BlockGraph::Reference ref(found_ref->second);
1256 :
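  :     // If asked to skip internal references, leave the references this block
  :     // makes to itself untouched.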
1257 E : if ((flags & kSkipInternalReferences) != 0 && referrer.first == this)
1258 E : continue;
1259 :
1260 E : Offset new_offset = ref.offset() + offset;
1261 E : Offset new_base = ref.base() + offset;
1262 :
1263 :     // Same thing as in SetReference: references to non-code blocks may lie
1264 : // outside the extent of the block.
1265 E : if (type_ == CODE_BLOCK) {
1266 : if (new_offset < 0 ||
1267 E : static_cast<size_t>(new_offset) > new_block->size()) {
1268 E : LOG(ERROR) << "Transferred reference lies outside of code block.";
1269 E : return false;
1270 : }
1271 : }
1272 :
1273 : // Redirect the reference to the new block with the adjusted offset.
1274 : BlockGraph::Reference new_ref(ref.type(),
1275 : ref.size(),
1276 : new_block,
1277 : new_offset,
1278 E : new_base);
1279 E : referrer.first->SetReference(referrer.second, new_ref);
1280 E : }
1281 :
1282 E : return true;
1283 E : }
1284 :
1285 : // Returns true if this block contains the given range of bytes.
1286 E : bool BlockGraph::Block::Contains(RelativeAddress address, size_t size) const {
1287 E : return (address >= addr_ && address + size <= addr_ + size_);
1288 E : }
1289 :
1290 E : bool BlockGraph::Reference::IsValid() const {
1291 : // We can't reference a NULL block.
1292 E : if (referenced_ == NULL)
1293 E : return false;
1294 :
1295 : // First see if the base address is valid for the referenced block. Base
1296 : // addresses must track an existing position in the block, between zero
1297 : // (beginning of the block), and one past the last byte (end of the
1298 : // block). These are the same offsets that are valid for InsertData(),
1299 : // such that inserting data at that position or before would shift the
1300 : // reference along. Hence, an end reference (one past the size of the
1301 : // block) would always be shifted regardless of the point of insertion;
1302 : // conversely, a reference to address zero would never move.
1303 E : if (base_ < 0 || static_cast<size_t>(base_) > referenced_->size())
1304 i : return false;
1305 :
1306 E : if (!IsValidTypeSize(type_, size_))
1307 i : return false;
1308 :
1309 E : return true;
1310 E : }
1311 :
1312 E : bool BlockGraph::Reference::IsValidTypeSize(ReferenceType type, Size size) {
1313 E : switch (type & ~RELOC_REF_BIT) {
1314 : // We see 8- and 32-bit relative JMPs.
1315 : case PC_RELATIVE_REF:
1316 E : return size == 1 || size == 4;
1317 :
1318 :     // These are all pointer sized.
1319 : case ABSOLUTE_REF:
1320 : case RELATIVE_REF:
1321 : case FILE_OFFSET_REF:
1322 E : return size == 4;
1323 :
1324 : case SECTION_REF:
1325 E : return size == 2;
1326 : case SECTION_OFFSET_REF:
1327 E : return size == 1 || size == 4;
1328 :
1329 : default:
1330 i : NOTREACHED() << "Unknown ReferenceType.";
1331 : }
1332 :
1333 i : return false;
1334 E : }
1335 :
1336 : } // namespace block_graph