Coverage for /Syzygy/block_graph/block_util.cc

Coverage summary — Lines: executed / instrumented / missing | exe / inst / miss | Language | Group
98.0% | 99 / 101 / 0 | C++ | source

Line-by-line coverage:

   1    :  // Copyright 2012 Google Inc. All Rights Reserved.
   2    :  //
   3    :  // Licensed under the Apache License, Version 2.0 (the "License");
   4    :  // you may not use this file except in compliance with the License.
   5    :  // You may obtain a copy of the License at
   6    :  //
   7    :  //     http://www.apache.org/licenses/LICENSE-2.0
   8    :  //
   9    :  // Unless required by applicable law or agreed to in writing, software
  10    :  // distributed under the License is distributed on an "AS IS" BASIS,
  11    :  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12    :  // See the License for the specific language governing permissions and
  13    :  // limitations under the License.
  14    :  
  15    :  #include "syzygy/block_graph/block_util.h"
  16    :  
  17    :  #include <algorithm>
  18    :  #include <vector>
  19    :  
  20    :  #include "mnemonics.h" // NOLINT
  21    :  
  22    :  namespace block_graph {
  23    :  
  24    :  bool CodeBlockAttributesAreBasicBlockSafe(
  25  E :      const block_graph::BlockGraph::Block* block) {
  26  E :    DCHECK(block != NULL);
  27  E :    DCHECK_EQ(BlockGraph::CODE_BLOCK, block->type());
  28    :  
  29    :    // If the block was built by our toolchain it's inherently safe. This
  30    :    // attribute is used to whitelist a block.
  31  E :    if (block->attributes() & BlockGraph::BUILT_BY_SYZYGY)
  32  E :      return true;
  33    :  
  34    :    // Any of the following attributes make it unsafe to basic-block
  35    :    // decompose the code block.
  36    :    static const BlockGraph::BlockAttributes kInvalidAttributes =
  37    :        BlockGraph::GAP_BLOCK |
  38    :        BlockGraph::PADDING_BLOCK |
  39    :        BlockGraph::HAS_INLINE_ASSEMBLY |
  40    :        BlockGraph::BUILT_BY_UNSUPPORTED_COMPILER |
  41    :        BlockGraph::ERRORED_DISASSEMBLY |
  42    :        BlockGraph::HAS_EXCEPTION_HANDLING |
  43    :        BlockGraph::DISASSEMBLED_PAST_END;
  44  E :    if ((block->attributes() & kInvalidAttributes) != 0)
  45  E :      return false;
  46    :  
  47  E :    return true;
  48  E :  }
  49    :  
  50    :  bool GetBasicBlockSourceRange(const BasicCodeBlock& bb,
  51  E :                                BlockGraph::Block::SourceRange* source_range) {
  52  E :    DCHECK(source_range != NULL);
  53    :  
  54    :    typedef BlockGraph::Block::SourceRange SourceRange;
  55  E :    std::vector<SourceRange> ranges;
  56    :  
  57    :    // Collect all the instruction and successor source ranges.
  58  E :    BasicBlock::Instructions::const_iterator inst_it(bb.instructions().begin());
  59  E :    for (; inst_it != bb.instructions().end(); ++inst_it) {
  60  E :      const SourceRange& range = inst_it->source_range();
  61  E :      if (range.size() > 0)
  62  E :        ranges.push_back(range);
  63  E :    }
  64  E :    BasicBlock::Successors::const_iterator succ_it(bb.successors().begin());
  65  E :    for (; succ_it != bb.successors().end(); ++succ_it) {
  66  E :      const SourceRange& range = succ_it->source_range();
  67  E :      if (range.size() > 0)
  68  E :        ranges.push_back(range);
  69  E :    }
  70    :  
  71  E :    if (ranges.empty())
  72  E :      return false;
  73    :  
  74    :    // Sort the ranges.
  75  E :    std::sort(ranges.begin(), ranges.end());
  76    :  
  77    :    // Test that they're all contiguous, while computing their total length.
  78  E :    SourceRange::Size size = ranges[0].size();
  79  E :    for (size_t i = 0; i < ranges.size() - 1; ++i) {
  80  E :      size += ranges[i + 1].size();
  81  E :      if (ranges[i].start() + ranges[i].size() != ranges[i + 1].start())
  82  E :        return false;
  83  E :    }
  84  E :    *source_range = SourceRange(ranges[0].start(), size);
  85    :  
  86  E :    return true;
  87  E :  }
  88    :  
  89    :  bool IsUnsafeReference(const BlockGraph::Block* referrer,
  90  E :                         const BlockGraph::Reference& ref) {
  91    :    // Skip references with a non-zero offset if we're
  92    :    // not instrumenting unsafe references.
  93  E :    if (ref.offset() != 0)
  94  E :      return true;
  95    :  
  96    :    BlockGraph::BlockAttributes kUnsafeAttribs =
  97    :        BlockGraph::HAS_INLINE_ASSEMBLY |
  98  E :        BlockGraph::BUILT_BY_UNSUPPORTED_COMPILER;
  99    :  
 100  E :    bool unsafe_referrer = false;
 101    :    if (referrer->type() == BlockGraph::CODE_BLOCK &&
 102  E :        (referrer->attributes() & kUnsafeAttribs) != 0) {
 103  E :      unsafe_referrer = true;
 104    :    }
 105    :  
 106  E :    DCHECK_EQ(BlockGraph::CODE_BLOCK, ref.referenced()->type());
 107  E :    bool unsafe_block = (ref.referenced()->attributes() & kUnsafeAttribs) != 0;
 108    :  
 109    :    // If both the referrer and the referenced blocks are unsafe, we can't
 110    :    // safely assume that this reference represents a call semantics,
 111    :    // e.g. where a return address is at the top of stack at entry.
 112    :    // Ideally we'd decide this on the basis of a full stack analysis, but
 113    :    // beggers can't be choosers, plus for hand-coded assembly that's
 114    :    // the halting problem :).
 115    :    // For instrumentation that uses return address swizzling, instrumenting
 116    :    // an unsafe reference leads to crashes, so better to back off and get
 117    :    // slightly less coverage.
 118  E :    return unsafe_referrer && unsafe_block;
 119  E :  }
 120    :  
 121    :  bool HasUnexpectedStackFrameManipulation(
 122  E :      block_graph::BasicBlockSubGraph* subgraph) {
 123  E :    DCHECK(subgraph != NULL);
 124    :  
 125    :    BasicBlockSubGraph::BBCollection::iterator it =
 126  E :        subgraph->basic_blocks().begin();
 127    :  
 128    :    // Process each basic block to find an invalid stack manipulation.
 129  E :    for (; it != subgraph->basic_blocks().end(); ++it) {
 130  E :      BasicCodeBlock* bb = BasicCodeBlock::Cast(*it);
 131  E :      if (bb == NULL)
 132  E :        continue;
 133    :  
 134    :      // Process each instruction.
 135  E :      BasicBlock::Instructions::iterator iter_inst = bb->instructions().begin();
 136  E :      for (; iter_inst != bb->instructions().end(); ++iter_inst) {
 137  E :        const _DInst& repr = iter_inst->representation();
 138    :  
 139    :        // Consider only instructions whose first operand is EBP (read/write).
 140  E :        if (repr.ops[0].type == O_REG && repr.ops[0].index == R_EBP) {
 141    :          // PUSH/POP EBP is valid.
 142  E :          if (repr.opcode == I_POP || repr.opcode == I_PUSH)
 143  E :            continue;
 144    :  
 145    :          // MOV EBP, ESP is valid.
 146    :          if (repr.opcode == I_MOV &&
 147  E :              repr.ops[1].type == O_REG && repr.ops[1].index == R_ESP)
 148  E :            continue;
 149    :  
 150    :          // We found a non conventional write to register EBP (stack pointer).
 151  E :          return true;
 152    :        }
 153  E :      }
 154  E :    }
 155    :  
 156    :    // There is no unconventional/unexpected stack frame manipulation.
 157  E :    return false;
 158  E :  }
 159    :  
// Computes the number of entries in the jump table that starts at
// @p jump_table_label inside @p block, writing the count to @p table_size.
// The table is measured by walking the block's references forward from the
// label until the references stop being contiguous, or until the next label
// (or the end of the block) is reached. Returns false if the label does not
// carry the JUMP_TABLE_LABEL attribute, true otherwise.
bool GetJumpTableSize(const BlockGraph::Block* block,
    BlockGraph::Block::LabelMap::const_iterator jump_table_label,
    size_t* table_size) {
  DCHECK(block != NULL);
  DCHECK(table_size != NULL);
  // The label iterator must actually belong to this block's label map.
  DCHECK(block->HasLabel(jump_table_label->first) &&
      block->labels().at(jump_table_label->first) == jump_table_label->second);

  // Ensure that this label has the jump table attribute.
  if (!jump_table_label->second.has_attributes(BlockGraph::JUMP_TABLE_LABEL)) {
    LOG(ERROR) << "This label doesn't have the jump table attribute.";
    return false;
  }

  // The label's offset is where the table begins; end_offset advances as
  // contiguous references are consumed below.
  BlockGraph::Offset beginning_offset = jump_table_label->first;
  BlockGraph::Offset end_offset = beginning_offset;

  // The maximum end offset for this jump table is either the offset of the next
  // label or the end of this block.
  BlockGraph::Offset max_end_offset = 0;
  // Note: jump_table_label is a by-value copy, so incrementing it here does
  // not affect the caller's iterator.
  BlockGraph::Block::LabelMap::const_iterator next_label = ++(jump_table_label);
  if (next_label != block->labels().end())
    max_end_offset = next_label->first;
  else
    max_end_offset = block->size();

  DCHECK(max_end_offset != 0);

  // The first table entry must be a reference at the label's offset, of
  // pointer width and absolute or relative type.
  BlockGraph::Block::ReferenceMap::const_iterator iter_ref =
      block->references().find(beginning_offset);
  DCHECK(iter_ref != block->references().end());
  DCHECK(iter_ref->first == beginning_offset);
  DCHECK(iter_ref->second.type() == BlockGraph::ABSOLUTE_REF ||
         iter_ref->second.type() == BlockGraph::RELATIVE_REF);
  DCHECK_EQ(BlockGraph::Reference::kMaximumSize, iter_ref->second.size());

  // Per the DCHECK above this equals kMaximumSize; it is the stride between
  // consecutive table entries.
  BlockGraph::Size reference_size = iter_ref->second.size();

  // Iterates over the references to calculate the size of this jump table, we
  // stop once we reach the maximum end offset or as soon as we find a reference
  // that is not contiguous to the previous one.
  while (end_offset < max_end_offset) {
    end_offset += reference_size;
    ++iter_ref;
    if (iter_ref == block->references().end() || iter_ref->first != end_offset)
      break;
    DCHECK_EQ(end_offset, iter_ref->first);
    DCHECK(iter_ref->second.type() == BlockGraph::ABSOLUTE_REF ||
           iter_ref->second.type() == BlockGraph::RELATIVE_REF);
    DCHECK_EQ(BlockGraph::Reference::kMaximumSize, iter_ref->second.size());
  }

  // Entry count = span of contiguous references divided by the entry width.
  *table_size = (end_offset - beginning_offset) /
      BlockGraph::Reference::kMaximumSize;

  return true;
}
 217    :  
 218    :  }  // namespace block_graph

Coverage information generated Thu Jul 04 09:34:53 2013.