Coverage for /Syzygy/pe/hot_patching_decomposer_unittest.cc

Coverage | Lines executed / instrumented / missing | exe / inst / miss | Language | Group
100.0% | 111 / 111 / 0 | C++ | test

Line-by-line coverage:

   1    :  // Copyright 2015 Google Inc. All Rights Reserved.
   2    :  //
   3    :  // Licensed under the Apache License, Version 2.0 (the "License");
   4    :  // you may not use this file except in compliance with the License.
   5    :  // You may obtain a copy of the License at
   6    :  //
   7    :  //     http://www.apache.org/licenses/LICENSE-2.0
   8    :  //
   9    :  // Unless required by applicable law or agreed to in writing, software
  10    :  // distributed under the License is distributed on an "AS IS" BASIS,
  11    :  // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12    :  // See the License for the specific language governing permissions and
  13    :  // limitations under the License.
  14    :  
  15    :  // TODO(cseri): Write a test that tests what happens on a relocated .dll
  16    :  
  17    :  #include "syzygy/pe/hot_patching_decomposer.h"
  18    :  
  19    :  #include <windows.h>
  20    :  
  21    :  #include "gtest/gtest.h"
  22    :  #include "syzygy/block_graph/basic_block_decomposer.h"
  23    :  #include "syzygy/instrument/transforms/asan_transform.h"
  24    :  #include "syzygy/pe/hot_patching_unittest_util.h"
  25    :  
  26    :  using block_graph::BlockGraph;
  27    :  
  28    :  namespace pe {
  29    :  
  30    :  namespace {
  31    :  
  32    :  using core::RelativeAddress;
  33    :  
  34    :  class HotPatchingDecomposerTest : public testing::HotPatchingTestDllTest {
  35    :   public:
  36  E :    HotPatchingDecomposerTest() {}
  37    :  
  38  E :    bool IsHotPatchableBlock(const BlockGraph::Block* block) {
  39    :      // The in-memory blockgraph contains two kinds of code blocks: the blocks
  40    :      // loaded from the metadata stream and the dummy blocks created while
  41    :      // parsing references. The latter has the BUILT_BY_UNSUPPORTED_COMPILER flag
  42    :      // set.
  43    :      return block->type() == BlockGraph::CODE_BLOCK &&
  44  E :             !(block->attributes() & BlockGraph::BUILT_BY_UNSUPPORTED_COMPILER);
  45  E :    }
  46    :  
  47    :    // Checks if the code and data labels are correctly loaded.
  48    :    // @param orig_block The original block.
  49    :    // @param block The decomposed block.
  50    :    // @param code_end The end of the code is written here.
  51    :    void CheckLabels(const BlockGraph::Block* orig_block,
  52    :                     const BlockGraph::Block* block,
  53  E :                     BlockGraph::Offset* code_end) {
  54    :      // There should be a code label at position 0:
  55  E :      ASSERT_EQ(1U, block->labels().count(0));
  56  E :      EXPECT_TRUE(block->labels().at(0).has_attributes(BlockGraph::CODE_LABEL));
  57    :  
  58  E :      if (block->labels().size() == 1) {
  59    :        // There is no data label, the whole block contains code.
  60  E :        *code_end = static_cast<BlockGraph::Offset>(block->data_size());
  61  E :      } else {
  62    :        // A data label must be the second label.
  63    :        // Recover the data label.
  64  E :        auto it = ++block->labels().begin();
  65  E :        EXPECT_TRUE(it->second.has_attributes(BlockGraph::DATA_LABEL));
  66    :  
  67    :        // The code size is the offset of the data label.
  68  E :        *code_end = it->first;
  69    :      }
  70    :  
  71    :      // Compare recovered labels with the labels of the original block.
  72    :      // These must be true:
  73    :      // - There should be no DATA_LABEL before |*code_end|
  74    :      // - Each JUMP_TABLE_LABEL must be recovered.
  75    :      // - Each CASE_TABLE_LABEL must be recovered.
  76  E :      for (const auto& entry : orig_block->labels()) {
  77  E :        BlockGraph::Offset label_offset = entry.first;
  78  E :        const BlockGraph::Label& orig_label = entry.second;
  79    :  
  80  E :        if (orig_label.has_attributes(BlockGraph::DATA_LABEL)) {
  81  E :          ASSERT_GE(label_offset, *code_end);
  82    :        }
  83  E :        if (orig_label.has_attributes(BlockGraph::JUMP_TABLE_LABEL)) {
  84  E :          ASSERT_EQ(1U, block->labels().count(label_offset));
  85  E :          const BlockGraph::Label& label = block->labels().at(label_offset);
  86    :  
  87  E :          EXPECT_TRUE(label.has_attributes(BlockGraph::DATA_LABEL));
  88  E :          EXPECT_TRUE(label.has_attributes(BlockGraph::JUMP_TABLE_LABEL));
  89    :        }
  90  E :        if (orig_label.has_attributes(BlockGraph::CASE_TABLE_LABEL)) {
  91  E :          ASSERT_EQ(1U, block->labels().count(label_offset));
  92  E :          const BlockGraph::Label& label = block->labels().at(label_offset);
  93    :  
  94  E :          EXPECT_TRUE(label.has_attributes(BlockGraph::DATA_LABEL));
  95  E :          EXPECT_TRUE(label.has_attributes(BlockGraph::CASE_TABLE_LABEL));
  96    :        }
  97    :      }
  98  E :    }
  99    :  
 100    :    // Checks if the block data is correctly loaded.
 101    :    // @param orig_block The original block.
 102    :    // @param block The decomposed block.
 103    :    void CheckData(const BlockGraph::Block* orig_block,
 104  E :                   const BlockGraph::Block* block) {
 105    :      // Compare the data in the block byte-by-byte.
 106  E :      for (size_t i = 0; i < orig_block->data_size(); ++i) {
 107    :        // Do not compare bytes that belong to inter-block references and
 108    :        // in-block absolute references. These references don't have their
 109    :        // final value in the original block_graph because they are calculated
 110    :        // at a later phase of writing a PE file. Also, absolute references
 111    :        // might get relocated.
 112  E :        auto ref_it = orig_block->references().find(i);
 113    :        if (ref_it != orig_block->references().end() &&
 114    :            (ref_it->second.referenced() != orig_block ||
 115  E :                ref_it->second.type() == BlockGraph::ABSOLUTE_REF) ) {
 116  E :          ASSERT_EQ(4U, ref_it->second.size()); // We expect 4-byte refs.
 117  E :          i += ref_it->second.size() - 1;
 118  E :          continue;
 119    :        }
 120    :  
 121  E :        EXPECT_EQ(orig_block->data()[i], block->data()[i]);
 122  E :      }
 123  E :    }
 124    :  
 125    :    // Checks if the references are correctly loaded.
 126    :    // @param orig_block The original block.
 127    :    // @param block The decomposed block.
 128    :    // @param code_end The end of the code part of the block. A different set of
 129    :    //     references are loaded for the code and the data part.
 130    :    void CheckReferences(const BlockGraph::Block* orig_block,
 131    :                         const BlockGraph::Block* block,
 132  E :                         BlockGraph::Offset code_end) {
 133    :      // Look at the references. The references in the decomposed block
 134    :      // must be a subset of the references in the original block.
 135  E :      size_t found_references = 0U;
 136  E :      for (const auto& entry : orig_block->references()) {
 137  E :        BlockGraph::Offset ref_offset = entry.first;
 138  E :        const BlockGraph::Reference& orig_ref = entry.second;
 139    :  
 140  E :        BlockGraph::Reference ref;
 141  E :        bool found = block->GetReference(ref_offset, &ref);
 142    :  
 143    :        // There references must be loaded in the code part:
 144    :        // - Inter-block PC-relative references.
 145    :        // - In-block absolute references, unless they refer a case table.
 146  E :        if (ref_offset < code_end) {
 147    :          if (orig_ref.type() == BlockGraph::PC_RELATIVE_REF &&
 148  E :              orig_block != orig_ref.referenced()) {
 149  E :            EXPECT_TRUE(found);
 150  E :          } else if (orig_ref.type() == BlockGraph::ABSOLUTE_REF &&
 151  E :              orig_block == orig_ref.referenced()) {
 152    :            if (orig_block->labels().count(orig_ref.offset()) &&
 153    :                orig_block->labels().at(orig_ref.offset()).has_attributes(
 154  E :                    BlockGraph::CASE_TABLE_LABEL)) {
 155  E :            } else {
 156  E :              EXPECT_TRUE(found);
 157    :            }
 158    :          }
 159  E :        } else {
 160    :          // Only in-block references are required in the data part.
 161  E :          if (orig_ref.referenced() == orig_block)
 162  E :            EXPECT_TRUE(found);
 163    :        }
 164    :  
 165  E :        if (!found)
 166  E :          continue;
 167    :  
 168  E :        ++found_references;
 169    :  
 170  E :        if (IsHotPatchableBlock(ref.referenced())) {
 171    :          // Refers a hot patchable block.
 172  E :          EXPECT_EQ(orig_ref.base(), ref.base());
 173  E :          EXPECT_EQ(orig_ref.offset(), ref.offset());
 174  E :          EXPECT_EQ(orig_ref.size(), ref.size());
 175  E :          EXPECT_EQ(orig_ref.referenced()->addr(),
 176    :                    ref.referenced()->addr());
 177  E :        } else {
 178    :          // Refers a code area not in a hot patchable block.
 179  E :          EXPECT_EQ(0, ref.base());
 180  E :          EXPECT_EQ(0, ref.offset());
 181  E :          EXPECT_EQ(orig_ref.size(), ref.size());
 182  E :          EXPECT_EQ(orig_ref.referenced()->addr() + orig_ref.offset(),
 183    :                    ref.referenced()->addr());
 184    :        }
 185  E :        EXPECT_EQ(orig_ref.type(), ref.type());
 186    :      }
 187    :      // If these are not equal that means that there is a reference not present
 188    :      // in the original block.
 189  E :      ASSERT_EQ(found_references, block->references().size());
 190  E :    }
 191    :  
 192    :    void CheckIfBlockLoadedCorrectly(const BlockGraph::Block* orig_block,
 193  E :                                     const BlockGraph::Block* block) {
 194    :  
 195    :      // Check that they have the same size.
 196  E :      ASSERT_EQ(orig_block->data_size(), block->data_size());
 197    :  
 198  E :      BlockGraph::Offset code_end = 0;
 199  E :      ASSERT_NO_FATAL_FAILURE(CheckLabels(orig_block, block, &code_end));
 200  E :      ASSERT_GT(code_end, 0);
 201    :  
 202  E :      ASSERT_NO_FATAL_FAILURE(CheckData(orig_block, block));
 203    :  
 204  E :      ASSERT_NO_FATAL_FAILURE(CheckReferences(orig_block, block, code_end));
 205  E :    }
 206    :  
 207    :   protected:
 208    :    // The block graph containing the result of the in-memory decomposer.
 209    :    BlockGraph block_graph_;
 210    :  };
 211    :  
 212    :  }  // namespace
 213    :  
 214  E :  TEST_F(HotPatchingDecomposerTest, TestHotPatchingDecomposer) {
 215  E :    ASSERT_NO_FATAL_FAILURE(HotPatchInstrumentTestDll());
 216    :  
 217    :    // Load hot patched library into memory.
 218  E :    testing::ScopedHMODULE module;
 219  E :    LoadTestDll(hp_test_dll_path_, &module);
 220    :  
 221    :    // Decompose hot patched library into a block graph.
 222  E :    pe::ImageLayout layout(&block_graph_);
 223  E :    HotPatchingDecomposer hp_decomposer(module);
 224  E :    ASSERT_TRUE(hp_decomposer.Decompose(&layout));
 225    :  
 226    :    // Count code blocks to check if all of them is present in the decomposed
 227    :    // block graph.
 228  E :    size_t code_block_count = 0;
 229  E :    for (const auto& entry : block_graph_.blocks()) {
 230  E :      const BlockGraph::Block* block = &entry.second;
 231    :  
 232  E :      if (IsHotPatchableBlock(block)) {
 233  E :        ++code_block_count;
 234    :      }
 235  E :    }
 236  E :    ASSERT_EQ(hp_transform_.blocks_prepared().size(), code_block_count);
 237    :  
 238    :    // Check that there is a corresponding block for each code block in the
 239    :    // original image with the same content.
 240  E :    for (const auto& entry : block_graph_.blocks()) {
 241  E :      const BlockGraph::Block* block = &entry.second;
 242    :  
 243  E :      if (IsHotPatchableBlock(block)) {
 244    :        // To find the corresponding block we look for a block with that has
 245    :        // the same relative address. (The relinker updates the final relative
 246    :        // addresses to their final values.)
 247  E :        int found = 0;
 248  E :        for (const auto& orig_entry : relinker_.block_graph().blocks()) {
 249  E :          const BlockGraph::Block* orig_block = &orig_entry.second;
 250    :  
 251  E :          if (orig_block->addr() == block->addr()) {
 252  E :            ++found;
 253  E :            ASSERT_NO_FATAL_FAILURE(CheckIfBlockLoadedCorrectly(orig_block,
 254    :                                                                block));
 255    :          }
 256    :        }
 257  E :        ASSERT_EQ(1, found);
 258    :  
 259    :        // Test if the decomposed block can be basic block decomposed. This test
 260    :        // ensures that we load all data needed for basic block decomposition.
 261    :        // Note: As the hot patching transformation uses a basic block
 262    :        // decomposer, all hot patchable blocks must be basic block
 263    :        // decomposable.
 264  E :        block_graph::BasicBlockSubGraph subgraph;
 265  E :        block_graph::BasicBlockDecomposer dec(block, &subgraph);
 266  E :        ASSERT_TRUE(dec.Decompose());
 267  E :      }
 268  E :    }
 269  E :  }
 270    :  
 271    :  }  // namespace pe

Coverage information generated Thu Jan 14 17:40:38 2016.