1 : // Copyright 2012 Google Inc. All Rights Reserved.
2 : //
3 : // Licensed under the Apache License, Version 2.0 (the "License");
4 : // you may not use this file except in compliance with the License.
5 : // You may obtain a copy of the License at
6 : //
7 : // http://www.apache.org/licenses/LICENSE-2.0
8 : //
9 : // Unless required by applicable law or agreed to in writing, software
10 : // distributed under the License is distributed on an "AS IS" BASIS,
11 : // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 : // See the License for the specific language governing permissions and
13 : // limitations under the License.
14 :
15 : #include "syzygy/agent/asan/shadow.h"
16 :
17 : #include <memory>
18 :
19 : #include "base/rand_util.h"
20 : #include "base/strings/stringprintf.h"
21 : #include "gtest/gtest.h"
22 : #include "syzygy/common/align.h"
23 : #include "syzygy/testing/metrics.h"
24 :
25 : namespace agent {
26 : namespace asan {
27 :
28 : namespace {
29 :
30 : template <typename AccessType>
31 E : void ShadowUtilPerfTest() {
32 E : const size_t kBufSize = 10240;
33 E : ALIGNAS(8) uint8_t buf[kBufSize] = {};
34 E : uint8_t* end = buf + kBufSize;
35 :
36 E : uint64_t tnet = 0;
37 : // Test all (mod 8) head and tail alignments.
38 E : for (size_t i = 0; i < 8; ++i) {
39 E : for (size_t j = 0; j < 8; ++j) {
40 E : ::memset(buf, 0xCC, i);
41 E : ::memset(buf + i, 0, kBufSize - i - j);
42 E : ::memset(end - j, 0xCC, j);
43 E : uint64_t t0 = ::__rdtsc();
44 E : ASSERT_TRUE(internal::IsZeroBufferImpl<AccessType>(buf + i, end - j));
45 E : uint64_t t1 = ::__rdtsc();
46 E : tnet += t1 - t0;
47 E : }
48 E : }
49 :
50 E : testing::EmitMetric(
51 : base::StringPrintf("Syzygy.Asan.Shadow.IsZeroBufferImpl.%i",
52 : sizeof(AccessType)),
53 : tnet);
54 E : }
55 :
56 : template <typename AccessType>
57 E : void ShadowUtilTest() {
58 E : const size_t kBufSize = 128;
59 E : ALIGNAS(8) uint8_t buf[kBufSize] = {};
60 E : uint8_t* end = buf + kBufSize;
61 :
62 : // Test all (mod 8) head and tail alignments.
63 E : for (size_t i = 0; i < 8; ++i) {
64 E : for (size_t j = 0; j < 8; ++j) {
65 E : ::memset(buf, 0xCC, i);
66 E : ::memset(buf + i, 0, kBufSize - i - j);
67 E : ::memset(end - j, 0xCC, j);
68 :
69 : // Test that a non-zero byte anywhere in the buffer is detected.
70 E : for (size_t k = i; k < kBufSize - j; ++k) {
71 E : buf[k] = 1;
72 E : ASSERT_FALSE(internal::IsZeroBufferImpl<AccessType>(buf + i, end - j));
73 E : buf[k] = 0;
74 E : }
75 E : }
76 E : }
77 E : }
78 :
// A derived class to expose protected members for unit-testing.
class TestShadow : public Shadow {
 public:
  TestShadow() : Shadow(kTestShadowSize) {
  }

  // We'll simulate memory as being 1GB in size. Each shadow byte covers
  // 2^kShadowRatioLog bytes of address space, hence the shift.
  static const size_t kTestShadowSize =
      (1 * 1024 * 1024 * 1024) >> kShadowRatioLog;

  // Protected functions that we want to unittest directly.
  using Shadow::Reset;
  using Shadow::ScanLeftForBracketingBlockStart;
  using Shadow::ScanRightForBracketingBlockEnd;
  // Raw shadow array, exposed so tests can inspect and hand-craft markers.
  using Shadow::shadow_;
};
95 :
// A fixture for shadow memory tests.
class ShadowTest : public testing::Test {
 public:
  // A fresh shadow (simulating a 1GB address space) for each test case.
  TestShadow test_shadow;
};
101 :
102 : } // namespace
103 :
// Exercises IsZeroBufferImpl for every supported access width: first the
// timing harness (which also asserts correctness on all-zero input), then
// the exhaustive single-byte detection test.
TEST_F(ShadowTest, IsZeroBufferImplTest) {
  ShadowUtilPerfTest<uint8_t>();
  ShadowUtilPerfTest<uint16_t>();
  ShadowUtilPerfTest<uint32_t>();
  ShadowUtilPerfTest<uint64_t>();

  ShadowUtilTest<uint8_t>();
  ShadowUtilTest<uint16_t>();
  ShadowUtilTest<uint32_t>();
  ShadowUtilTest<uint64_t>();
}
115 :
116 E : TEST_F(ShadowTest, PoisonUnpoisonAccess) {
117 E : for (size_t count = 0; count < 100; ++count) {
118 : // Use a random 8-byte aligned end address.
119 E : const size_t size = base::RandInt(1, 16384);
120 : const uint8_t* end_addr = reinterpret_cast<const uint8_t*>(
121 E : base::RandInt(65536, 10 * 1024 * 1024) * 8);
122 E : const uint8_t* start_addr = end_addr - size;
123 :
124 E : for (size_t i = 0; i < size; ++i)
125 E : EXPECT_TRUE(test_shadow.IsAccessible(start_addr + i));
126 :
127 E : test_shadow.Poison(start_addr, size, kAsanReservedMarker);
128 E : for (size_t i = 0; i < size; ++i)
129 E : EXPECT_FALSE(test_shadow.IsAccessible(start_addr + i));
130 E : EXPECT_TRUE(test_shadow.IsAccessible(start_addr - 1));
131 E : EXPECT_TRUE(test_shadow.IsAccessible(start_addr + size));
132 :
133 E : const size_t aligned_size = ::common::AlignUp(size,
134 : kShadowRatio);
135 E : const uint8_t* aligned_start_addr = end_addr - aligned_size;
136 E : test_shadow.Unpoison(aligned_start_addr, aligned_size);
137 E : for (size_t i = 0; i < size; ++i)
138 E : EXPECT_TRUE(test_shadow.IsAccessible(start_addr + i));
139 E : }
140 E : }
141 :
// Verifies that SetUp marks the shadow's own storage and the low
// (non-addressable) memory, and that TearDown restores everything to the
// default addressable marker.
TEST_F(ShadowTest, SetUpAndTearDown) {
  // Don't check all the shadow bytes otherwise this test will take too much
  // time.
  const size_t kLookupInterval = 25;

  // Shadow indices covering the shadow array's own storage; each shadow
  // byte presumably covers 8 bytes of address space, hence the >> 3 —
  // TODO confirm against kShadowRatioLog.
  intptr_t shadow_array_start = reinterpret_cast<intptr_t>(test_shadow.shadow_);
  size_t shadow_start = shadow_array_start >> 3;
  size_t shadow_end = shadow_start + (test_shadow.length() >> 3);

  // Addresses below 0x10000 are expected to be marked invalid after SetUp.
  const size_t non_addressable_memory_end = (0x10000 >> 3);

  test_shadow.SetUp();
  for (size_t i = shadow_start; i < shadow_end; i += kLookupInterval)
    ASSERT_EQ(kAsanMemoryMarker, test_shadow.shadow_[i]);

  for (size_t i = 0; i < non_addressable_memory_end; i += kLookupInterval)
    ASSERT_EQ(kInvalidAddressMarker, test_shadow.shadow_[i]);

  // After TearDown both regions must be back to the default marker.
  test_shadow.TearDown();
  for (size_t i = shadow_start; i < shadow_end; i += kLookupInterval)
    ASSERT_EQ(kHeapAddressableMarker, test_shadow.shadow_[i]);

  for (size_t i = 0; i < non_addressable_memory_end; i += kLookupInterval)
    ASSERT_EQ(kHeapAddressableMarker, test_shadow.shadow_[i]);
}
167 :
168 : namespace {
169 :
// Allocation sizes exercised by the array/range tests below: a mix of
// shadow-ratio multiples and odd sizes.
const size_t kSizesToTest[] = {4, 7, 12, 15, 21, 87, 88};

// Provides an 8-byte aligned scratch buffer for poisoning tests.
class ScopedAlignedArray {
 public:
  // Returns the aligned buffer.
  uint8_t* get_aligned_array() { return test_array_; }
  // Returns the buffer's length, in bytes.
  size_t get_aligned_length() { return kArrayLength; }

 private:
  static const size_t kArrayLength = 0x100;

  ALIGNAS(8) uint8_t test_array_[kArrayLength];
};
182 :
183 : } // namespace
184 :
// Verifies Shadow::GetNullTerminatedArraySize for 1- and 2-byte element
// types, over arrays of various sizes whose tail is guarded by poisoned
// shadow memory.
TEST_F(ShadowTest, GetNullTerminatedArraySize) {
  ScopedAlignedArray test_array;
  uint8_t* aligned_test_array = test_array.get_aligned_array();
  size_t aligned_array_length = test_array.get_aligned_length();

  // Fill with a non-null marker and poison the whole buffer; each iteration
  // unpoisons only the prefix it is testing.
  const uint8_t kMarkerValue = 0xAA;
  ::memset(aligned_test_array, kMarkerValue, aligned_array_length);
  test_shadow.Poison(
      aligned_test_array, aligned_array_length, kAsanReservedMarker);

  for (size_t size_to_test : kSizesToTest) {
    test_shadow.Unpoison(aligned_test_array, size_to_test);
    size_t size = 0;

    // Put a null byte at the end of the array and call the
    // GetNullTerminatedArraySize function with a 1-byte template argument. This
    // simulates the use of this function for a null terminated string.
    aligned_test_array[size_to_test - 1] = 0;
    EXPECT_TRUE(test_shadow.GetNullTerminatedArraySize<uint8_t>(
        aligned_test_array, 0U, &size));
    EXPECT_EQ(size_to_test, size);

    if (size_to_test % sizeof(uint16_t) == 0) {
      // Call the GetNullTerminatedArraySize with a 2-byte template argument.
      // As there is only one null byte at the end of the array we expect the
      // function to return false.
      EXPECT_FALSE(test_shadow.GetNullTerminatedArraySize<uint16_t>(
          aligned_test_array, 0U, &size));
      EXPECT_EQ(size_to_test, size);
      // Put a second null byte at the end of the array and call the function
      // again, this time we expect the function to succeed.
      aligned_test_array[size_to_test - sizeof(uint16_t)] = 0;
      EXPECT_TRUE(test_shadow.GetNullTerminatedArraySize<uint16_t>(
          aligned_test_array, 0U, &size));
      EXPECT_EQ(size_to_test, size);
      aligned_test_array[size_to_test - sizeof(uint16_t)] = kMarkerValue;
    }
    aligned_test_array[size_to_test - 1] = kMarkerValue;

    // With no terminator inside the accessible prefix the call fails once it
    // reaches the poisoned byte (|size| reports how far it got), but
    // succeeds when size_to_test is passed — presumably an explicit upper
    // bound; TODO confirm against the function's declaration.
    aligned_test_array[size_to_test] = kMarkerValue;
    EXPECT_FALSE(test_shadow.GetNullTerminatedArraySize<uint8_t>(
        aligned_test_array, 0U, &size));
    EXPECT_EQ(size_to_test, size);
    EXPECT_TRUE(test_shadow.GetNullTerminatedArraySize<uint8_t>(
        aligned_test_array, size_to_test, &size));

    // Re-poison the prefix (rounded up to shadow granularity) for the next
    // iteration.
    test_shadow.Poison(aligned_test_array,
                       ::common::AlignUp(size_to_test, kShadowRatio),
                       kAsanReservedMarker);
  }
  test_shadow.Unpoison(aligned_test_array, aligned_array_length);
}
237 :
// Verifies IsRangeAccessible against an unpoisoned window surrounded by
// poisoned memory: any sub-range of the window is accessible, any range
// touching bytes outside it is not.
TEST_F(ShadowTest, IsAccessibleRange) {
  ScopedAlignedArray scoped_test_array;
  const uint8_t* aligned_test_array = scoped_test_array.get_aligned_array();
  size_t aligned_array_length = scoped_test_array.get_aligned_length();

  // Poison the aligned array.
  test_shadow.Poison(aligned_test_array, aligned_array_length,
                     kAsanReservedMarker);

  // Use a pointer into the array to allow for the header to be poisoned.
  const uint8_t* test_array = aligned_test_array + kShadowRatio;
  size_t test_array_length = aligned_array_length - kShadowRatio;
  // Zero-length range is always accessible.
  EXPECT_TRUE(test_shadow.IsRangeAccessible(test_array, 0U));

  for (size_t size : kSizesToTest) {
    ASSERT_GT(test_array_length, size);

    test_shadow.Unpoison(test_array, size);

    // An overflowing range is always inaccessible.
    EXPECT_FALSE(
        test_shadow.IsRangeAccessible(test_array + 3, static_cast<size_t>(-3)));

    for (size_t i = 0; i < size; ++i) {
      // Try valid ranges at every starting position inside the unpoisoned
      // range.
      EXPECT_TRUE(test_shadow.IsRangeAccessible(test_array + i, size - i));

      // Try valid ranges ending at every position inside the unpoisoned range.
      EXPECT_TRUE(test_shadow.IsRangeAccessible(test_array, size - i));
    }

    for (size_t i = 1; i < kShadowRatio; ++i) {
      // Try invalid ranges at starting positions outside the unpoisoned range.
      EXPECT_FALSE(test_shadow.IsRangeAccessible(test_array - i, size));

      // Try invalid ranges at ending positions outside the unpoisoned range.
      EXPECT_FALSE(test_shadow.IsRangeAccessible(test_array, size + i));
    }
  }
  test_shadow.Unpoison(aligned_test_array, aligned_array_length);
}
281 :
// Verifies FindFirstPoisonedByte: nullptr for fully-accessible ranges, the
// address of the first poisoned byte otherwise. Mirrors IsAccessibleRange.
TEST_F(ShadowTest, FindFirstPoisonedByte) {
  ScopedAlignedArray scoped_test_array;
  const uint8_t* aligned_test_array = scoped_test_array.get_aligned_array();
  size_t aligned_array_length = scoped_test_array.get_aligned_length();

  // Poison the aligned array.
  test_shadow.Poison(aligned_test_array, aligned_array_length,
                     kAsanReservedMarker);

  // Use a pointer into the array to allow for the header to be poisoned.
  const uint8_t* test_array = aligned_test_array + kShadowRatio;
  size_t test_array_length = aligned_array_length - kShadowRatio;
  // Zero-length range is always accessible.
  EXPECT_EQ(nullptr, test_shadow.FindFirstPoisonedByte(test_array, 0U));

  for (size_t size : kSizesToTest) {
    ASSERT_GT(test_array_length, size);

    test_shadow.Unpoison(test_array, size);

    // An overflowing range is always inaccessible; the range's own start
    // byte is reported.
    EXPECT_EQ(test_array + 3, test_shadow.FindFirstPoisonedByte(
        test_array + 3, static_cast<size_t>(-3)));

    for (size_t i = 0; i < size; ++i) {
      // Try valid ranges at every starting position inside the unpoisoned
      // range.
      EXPECT_EQ(nullptr,
                test_shadow.FindFirstPoisonedByte(test_array + i, size - i));

      // Try valid ranges ending at every position inside the unpoisoned range.
      EXPECT_EQ(nullptr,
                test_shadow.FindFirstPoisonedByte(test_array, size - i));
    }

    for (size_t i = 1; i < kShadowRatio; ++i) {
      // Ranges starting before the unpoisoned window report the (poisoned)
      // start byte itself.
      EXPECT_EQ(test_array - i,
                test_shadow.FindFirstPoisonedByte(test_array - i, size));

      // Ranges running past the window report the first byte beyond it.
      EXPECT_EQ(test_array + size,
                test_shadow.FindFirstPoisonedByte(test_array, size + i));
    }
  }
  test_shadow.Unpoison(aligned_test_array, aligned_array_length);
}
329 :
// Verifies that MarkAsFreed over an outer block's body marks that body as
// freed (including the nested block's body) while leaving both blocks'
// redzone markers intact.
TEST_F(ShadowTest, MarkAsFreed) {
  BlockLayout l0 = {}, l1 = {};
  // l1 is the inner block; l0's body is sized to hold l1 plus some slack.
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 16, 30, 30, &l1));
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio,
                              l1.block_size + 2 * kShadowRatio, 30, 30, &l0));

  uint8_t* data = new uint8_t[l0.block_size];

  uint8_t* d0 = data;
  BlockInfo i0 = {};
  BlockInitialize(l0, d0, false, &i0);
  test_shadow.PoisonAllocatedBlock(i0);

  // Place the nested block inside the outer block's body.
  uint8_t* d1 = i0.RawBody() + kShadowRatio;
  BlockInfo i1 = {};
  BlockInitialize(l1, d1, true, &i1);
  test_shadow.PoisonAllocatedBlock(i1);

  test_shadow.MarkAsFreed(i0.body, i0.body_size);
  // Walk every byte of the outer block and check its shadow classification.
  for (uint8_t* p = i0.RawBlock(); p < i0.RawBlock() + i0.block_size; ++p) {
    if (p >= i0.RawBlock() && p < i0.RawBody()) {
      // The outer block's left redzone is untouched by MarkAsFreed.
      EXPECT_TRUE(test_shadow.IsLeftRedzone(p));
    } else if (p >= i0.RawBody() &&
               p < i0.RawTrailerPadding()) {
      if (p >= i1.RawBlock() && p < i1.RawBody()) {
        // The nested block's left redzone survives.
        EXPECT_TRUE(test_shadow.IsLeftRedzone(p));
      } else if (p >= i1.RawBody() && p < i1.RawTrailerPadding()) {
        // The nested block's body is marked freed.
        EXPECT_EQ(kHeapFreedMarker,
                  test_shadow.GetShadowMarkerForAddress(p));
      } else if (p >= i1.RawTrailerPadding() &&
                 p < i1.RawBlock() + i1.block_size) {
        // The nested block's right redzone survives.
        EXPECT_TRUE(test_shadow.IsRightRedzone(p));
      } else {
        // Outer body outside the nested block is marked freed.
        EXPECT_EQ(kHeapFreedMarker,
                  test_shadow.GetShadowMarkerForAddress(p));
      }
    } else if (p >= i0.RawTrailerPadding() &&
               p < i0.RawBlock() + i0.block_size) {
      // The outer block's right redzone is untouched.
      EXPECT_TRUE(test_shadow.IsRightRedzone(p));
    }
  }

  test_shadow.Unpoison(data, l0.block_size);
  delete [] data;
}
375 :
// Verifies the exact shadow markers written by PoisonAllocatedBlock for a
// block with a 15-byte header and a 22-byte trailer, then checks byte-level
// accessibility of header, body, and redzones.
TEST_F(ShadowTest, PoisonAllocatedBlock) {
  BlockLayout layout = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 15, 22, 0, &layout));

  uint8_t* data = new uint8_t[layout.block_size];
  BlockInfo info = {};
  BlockInitialize(layout, data, false, &info);

  test_shadow.PoisonAllocatedBlock(info);
  // One shadow marker covers each 8-byte run of the block.
  // NOTE(review): the |7 on the start marker and the bare 0/7 values
  // presumably encode fully/partially addressable runs — confirm against
  // the shadow marker definitions.
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 0 * 8),
            kHeapBlockStartMarker0 | 7);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 1 * 8),
            kHeapLeftPaddingMarker);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 2 * 8),
            kHeapLeftPaddingMarker);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 3 * 8),
            0);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 4 * 8),
            7);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 5 * 8),
            kHeapRightPaddingMarker);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 6 * 8),
            kHeapRightPaddingMarker);
  EXPECT_EQ(test_shadow.GetShadowMarkerForAddress(data + 7 * 8),
            kHeapBlockEndMarker);

  // Header and both redzones must be inaccessible; the body accessible.
  uint8_t* cursor = info.RawHeader();
  for (; cursor < info.RawBody(); ++cursor)
    EXPECT_FALSE(test_shadow.IsAccessible(cursor));
  for (; cursor < info.RawBody() + info.body_size; ++cursor)
    EXPECT_TRUE(test_shadow.IsAccessible(cursor));
  for (; cursor < info.RawHeader() + info.block_size; ++cursor)
    EXPECT_FALSE(test_shadow.IsAccessible(cursor));
  test_shadow.Unpoison(info.RawBlock(), info.block_size);

  delete [] data;
}
413 :
// Verifies ScanLeftForBracketingBlockStart / ScanRightForBracketingBlockEnd
// on a hand-crafted shadow: an outer block spanning [offset, offset + 4]
// that contains a nested block spanning [offset + 1, offset + 3].
TEST_F(ShadowTest, ScanLeftAndRight) {
  size_t offset = test_shadow.length() / 2;
  size_t l = 0;
  test_shadow.shadow_[offset + 0] = kHeapBlockStartMarker0;
  test_shadow.shadow_[offset + 1] = kHeapNestedBlockStartMarker0;
  test_shadow.shadow_[offset + 2] = kHeapAddressableMarker;
  test_shadow.shadow_[offset + 3] = kHeapNestedBlockEndMarker;
  test_shadow.shadow_[offset + 4] = kHeapBlockEndMarker;

  // Depth 0: each position resolves to the start of the innermost block
  // bracketing it.
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(0, offset + 0, &l));
  EXPECT_EQ(offset, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(0, offset + 1, &l));
  EXPECT_EQ(offset + 1, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(0, offset + 2, &l));
  EXPECT_EQ(offset + 1, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(0, offset + 3, &l));
  EXPECT_EQ(offset + 1, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(0, offset + 4, &l));
  EXPECT_EQ(offset, l);

  // Depth 1: the scan skips one nesting level; the outer block's own edges
  // have no bracketing block at that depth.
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(1, offset + 0, &l));
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(1, offset + 1, &l));
  EXPECT_EQ(offset, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(1, offset + 2, &l));
  EXPECT_EQ(offset, l);
  EXPECT_TRUE(test_shadow.ScanLeftForBracketingBlockStart(1, offset + 3, &l));
  EXPECT_EQ(offset, l);
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(1, offset + 4, &l));

  // Depth 2: no position has a bracketing block two levels up.
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(2, offset + 0, &l));
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(2, offset + 1, &l));
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(2, offset + 2, &l));
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(2, offset + 3, &l));
  EXPECT_FALSE(test_shadow.ScanLeftForBracketingBlockStart(2, offset + 4, &l));

  // Same layout, scanning right for the bracketing block ends.
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(0, offset + 0, &l));
  EXPECT_EQ(offset + 4, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(0, offset + 1, &l));
  EXPECT_EQ(offset + 3, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(0, offset + 2, &l));
  EXPECT_EQ(offset + 3, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(0, offset + 3, &l));
  EXPECT_EQ(offset + 3, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(0, offset + 4, &l));
  EXPECT_EQ(offset + 4, l);

  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(1, offset + 0, &l));
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(1, offset + 1, &l));
  EXPECT_EQ(offset + 4, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(1, offset + 2, &l));
  EXPECT_EQ(offset + 4, l);
  EXPECT_TRUE(test_shadow.ScanRightForBracketingBlockEnd(1, offset + 3, &l));
  EXPECT_EQ(offset + 4, l);
  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(1, offset + 4, &l));

  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(2, offset + 0, &l));
  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(2, offset + 1, &l));
  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(2, offset + 2, &l));
  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(2, offset + 3, &l));
  EXPECT_FALSE(test_shadow.ScanRightForBracketingBlockEnd(2, offset + 4, &l));

  // Restore the hand-crafted shadow bytes for subsequent tests.
  ::memset(test_shadow.shadow_ + offset, 0, 5);
}
477 :
// Benchmarks ScanRightForBracketingBlockEnd over a 1MB shadow region
// containing a few nested blocks, and emits the total cycle count.
TEST_F(ShadowTest, ScanRightPerfTest) {
  size_t offset = test_shadow.length() / 2;
  size_t length = 1 * 1024 * 1024;

  // Start from a clean 1MB stretch of shadow.
  ::memset(test_shadow.shadow_ + offset, 0, length);

  test_shadow.shadow_[offset + 0] = kHeapBlockStartMarker0;
  // A nested block with freed contents.
  test_shadow.shadow_[offset + 50] = kHeapNestedBlockStartMarker0;
  ::memset(test_shadow.shadow_ + offset + 51, kHeapFreedMarker, 8);
  test_shadow.shadow_[offset + 60] = kHeapNestedBlockEndMarker;
  // A nested block with a nested block.
  test_shadow.shadow_[offset + 100000] = kHeapNestedBlockStartMarker0;
  test_shadow.shadow_[offset + 100100] = kHeapNestedBlockStartMarker0;
  test_shadow.shadow_[offset + 100400] = kHeapNestedBlockEndMarker;
  test_shadow.shadow_[offset + 200000] = kHeapNestedBlockEndMarker;
  // The end of the outer block.
  test_shadow.shadow_[offset + length - 1] = kHeapBlockEndMarker;

  // Time 100 scans from just inside the outer block to its end marker.
  uint64_t tnet = 0;
  for (size_t i = 0; i < 100; ++i) {
    size_t l = 0;
    uint64_t t0 = ::__rdtsc();
    test_shadow.ScanRightForBracketingBlockEnd(0, offset + 1, &l);
    uint64_t t1 = ::__rdtsc();
    tnet += t1 - t0;
  }
  testing::EmitMetric("Syzygy.Asan.Shadow.ScanRightForBracketingBlockEnd",
                      tnet);

  // Reset the shadow memory.
  ::memset(test_shadow.shadow_ + offset, 0, length);
}
511 :
512 E : TEST_F(ShadowTest, IsLeftOrRightRedzone) {
513 E : BlockLayout layout = {};
514 E : const size_t kAllocSize = 15;
515 E : ASSERT_NE(0U, kAllocSize % kShadowRatio);
516 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, kAllocSize, 0, 0,
517 E : &layout));
518 :
519 E : std::unique_ptr<uint8_t[]> data(new uint8_t[layout.block_size]);
520 E : BlockInfo info = {};
521 E : BlockInitialize(layout, data.get(), false, &info);
522 :
523 E : test_shadow.PoisonAllocatedBlock(info);
524 E : uint8_t* block = reinterpret_cast<uint8_t*>(info.header);
525 E : uint8_t* cursor = block;
526 :
527 E : for (; cursor < info.RawBody(); ++cursor) {
528 E : EXPECT_TRUE(test_shadow.IsLeftRedzone(cursor));
529 E : EXPECT_FALSE(test_shadow.IsRightRedzone(cursor));
530 E : }
531 E : for (; cursor < info.RawBody() + info.body_size; ++cursor) {
532 E : EXPECT_FALSE(test_shadow.IsLeftRedzone(cursor));
533 E : EXPECT_FALSE(test_shadow.IsRightRedzone(cursor));
534 E : }
535 E : for (; cursor < block + info.block_size; ++cursor) {
536 E : EXPECT_FALSE(test_shadow.IsLeftRedzone(cursor));
537 E : EXPECT_TRUE(test_shadow.IsRightRedzone(cursor));
538 E : }
539 :
540 E : test_shadow.Unpoison(block, info.block_size);
541 E : }
542 :
543 : namespace {
544 :
// Exercises BlockInfoFromShadow and ParentBlockInfoFromShadow for an outer
// block described by |outer|, first empty and then with a |nested| block
// placed inside its body.
void TestBlockInfoFromShadow(Shadow* shadow,
                             const BlockLayout& outer,
                             const BlockLayout& nested) {
  ASSERT_TRUE(shadow != nullptr);
  ASSERT_LE(nested.block_size, outer.body_size);

  uint8_t* data = new uint8_t[outer.block_size];

  // Try recovering the block from every position within it when no nested
  // block exists. Expect finding a nested block to fail.
  BlockInfo info = {};
  BlockInitialize(outer, data, false, &info);
  shadow->PoisonAllocatedBlock(info);
  BlockInfo info_recovered = {};
  for (size_t i = 0; i < info.block_size; ++i) {
    EXPECT_TRUE(shadow->BlockInfoFromShadow(
        info.RawBlock() + i, &info_recovered));
    EXPECT_EQ(0, ::memcmp(&info, &info_recovered, sizeof(info)));

    // This block should have no parent block as its not nested.
    EXPECT_FALSE(shadow->ParentBlockInfoFromShadow(
        info, &info_recovered));
  }

  // Place a nested block and try the recovery from every position again.
  // The nested block is centered in the body, with the slack rounded down
  // to twice the shadow ratio.
  size_t padding = ::common::AlignDown(info.body_size - nested.block_size,
                                       kShadowRatio * 2);
  uint8_t* nested_begin = info.RawBody() + padding / 2;
  uint8_t* nested_end = nested_begin + nested.block_size;
  BlockInfo nested_info = {};
  BlockInitialize(nested, nested_begin, true, &nested_info);
  nested_info.header->is_nested = true;
  shadow->PoisonAllocatedBlock(nested_info);
  for (size_t i = 0; i < info.block_size; ++i) {
    uint8_t* pos = info.RawBlock() + i;
    EXPECT_TRUE(shadow->BlockInfoFromShadow(pos, &info_recovered));

    BlockInfo parent_info = {};
    bool found_parent = shadow->ParentBlockInfoFromShadow(
        info_recovered, &parent_info);

    // Positions inside the nested block must recover the nested block and
    // its parent; positions outside it recover the outer block, no parent.
    if (pos >= nested_begin && pos < nested_end) {
      EXPECT_EQ(0, ::memcmp(&nested_info, &info_recovered,
                            sizeof(nested_info)));
      EXPECT_TRUE(found_parent);
      EXPECT_EQ(0, ::memcmp(&info, &parent_info, sizeof(info)));
    } else {
      EXPECT_EQ(0, ::memcmp(&info, &info_recovered, sizeof(info)));
      EXPECT_FALSE(found_parent);
    }
  }
  shadow->Unpoison(info.header, info.block_size);

  delete [] data;
}
600 :
601 : } // namespace
602 :
// Drives TestBlockInfoFromShadow with two different parent layouts, one
// tight and one padded, around the same nested layout.
TEST_F(ShadowTest, BlockInfoFromShadow) {
  // This is a simple layout that will be nested inside of another block.
  BlockLayout layout0 = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 6, 0, 0, &layout0));

  // Plan two layouts, one with padding and another with none. The first has
  // exactly enough space for the nested block, while the second has room to
  // spare.
  BlockLayout layout1 = {};
  BlockLayout layout2 = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio,
      ::common::AlignUp(layout0.block_size, kShadowRatio) + 4, 0, 0,
      &layout1));
  ASSERT_EQ(0u, layout1.header_padding_size);
  ASSERT_EQ(0u, layout1.trailer_padding_size);
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio,
      layout0.block_size + 2 * kShadowRatio, 32, 13, &layout2));
  ASSERT_LT(0u, layout2.header_padding_size);
  ASSERT_LT(0u, layout2.trailer_padding_size);

  EXPECT_NO_FATAL_FAILURE(TestBlockInfoFromShadow(
      &test_shadow, layout1, layout0));
  EXPECT_NO_FATAL_FAILURE(TestBlockInfoFromShadow(
      &test_shadow, layout2, layout0));
}
628 :
// Verifies IsBeginningOfBlockBody for a live block and again after the
// block is quarantined and its body marked as freed.
TEST_F(ShadowTest, IsBeginningOfBlockBody) {
  BlockLayout l = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 7, 0, 0, &l));

  size_t data_size = l.block_size;
  std::unique_ptr<uint8_t[]> data(new uint8_t[data_size]);

  BlockInfo block_info = {};
  BlockInitialize(l, data.get(), false, &block_info);

  test_shadow.PoisonAllocatedBlock(block_info);

  // The body start is recognized; the block start (header) is not.
  EXPECT_TRUE(test_shadow.IsBeginningOfBlockBody(block_info.body));
  EXPECT_FALSE(test_shadow.IsBeginningOfBlockBody(data.get()));

  // The same must hold once the block is quarantined and its body freed.
  block_info.header->state = QUARANTINED_BLOCK;
  test_shadow.MarkAsFreed(block_info.body, block_info.body_size);

  EXPECT_TRUE(test_shadow.IsBeginningOfBlockBody(block_info.body));
  EXPECT_FALSE(test_shadow.IsBeginningOfBlockBody(data.get()));

  test_shadow.Unpoison(data.get(), data_size);
}
652 :
// Same as IsBeginningOfBlockBody, but for the degenerate zero-sized body,
// and using the flooded quarantine state.
TEST_F(ShadowTest, IsBeginningOfBlockBodyForBlockOfSizeZero) {
  BlockLayout l = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 0, 0, 0, &l));

  size_t data_size = l.block_size;
  std::unique_ptr<uint8_t[]> data(new uint8_t[data_size]);

  BlockInfo block_info = {};
  BlockInitialize(l, data.get(), false, &block_info);

  test_shadow.PoisonAllocatedBlock(block_info);

  // Even with a zero-sized body, the body position must be recognized and
  // the block start must not.
  EXPECT_TRUE(test_shadow.IsBeginningOfBlockBody(block_info.body));
  EXPECT_FALSE(test_shadow.IsBeginningOfBlockBody(data.get()));

  // The same must hold after quarantining (flooded variant) and freeing.
  block_info.header->state = QUARANTINED_FLOODED_BLOCK;
  test_shadow.MarkAsFreed(block_info.body, block_info.body_size);

  EXPECT_TRUE(test_shadow.IsBeginningOfBlockBody(block_info.body));
  EXPECT_FALSE(test_shadow.IsBeginningOfBlockBody(data.get()));

  test_shadow.Unpoison(data.get(), data_size);
}
676 :
677 E : TEST_F(ShadowTest, MarkAsFreedPerfTest) {
678 E : std::vector<uint8_t> buf;
679 E : buf.resize(10 * 1024 * 1024, 0);
680 :
681 E : uint64_t tnet = 0;
682 E : for (size_t i = 0; i < 1000; ++i) {
683 E : test_shadow.Unpoison(buf.data(), buf.size());
684 E : uint64_t t0 = ::__rdtsc();
685 E : test_shadow.MarkAsFreed(buf.data(), buf.size());
686 E : uint64_t t1 = ::__rdtsc();
687 E : tnet += t1 - t0;
688 E : test_shadow.Unpoison(buf.data(), buf.size());
689 E : }
690 E : testing::EmitMetric("Syzygy.Asan.Shadow.MarkAsFreed", tnet);
691 E : }
692 :
// Verifies the per-page protection bits: single-page marking is idempotent,
// and multi-page operations affect exactly the requested range, leaving the
// neighboring pages untouched.
TEST_F(ShadowTest, PageBits) {
  // Set an individual page.
  const uint8_t* addr = reinterpret_cast<const uint8_t*>(16 * 4096);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));
  test_shadow.MarkPageProtected(addr);
  EXPECT_TRUE(test_shadow.PageIsProtected(addr));
  // Re-marking an already-protected page must not change anything.
  test_shadow.MarkPageProtected(addr);
  EXPECT_TRUE(test_shadow.PageIsProtected(addr));
  test_shadow.MarkPageUnprotected(addr);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));
  // Likewise for re-unmarking an already-unprotected page.
  test_shadow.MarkPageUnprotected(addr);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));

  // Set a range of pages at once.
  const uint8_t* addr2 = addr + 4096;
  EXPECT_FALSE(test_shadow.PageIsProtected(addr - 4096));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2 + 4096));
  test_shadow.MarkPagesProtected(addr, 2 * 4096);
  // Only the two pages in the range are protected; neighbors are untouched.
  EXPECT_FALSE(test_shadow.PageIsProtected(addr - 4096));
  EXPECT_TRUE(test_shadow.PageIsProtected(addr));
  EXPECT_TRUE(test_shadow.PageIsProtected(addr2));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2 + 4096));
  // Re-protecting the same range is idempotent.
  test_shadow.MarkPagesProtected(addr, 2 * 4096);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr - 4096));
  EXPECT_TRUE(test_shadow.PageIsProtected(addr));
  EXPECT_TRUE(test_shadow.PageIsProtected(addr2));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2 + 4096));
  test_shadow.MarkPagesUnprotected(addr, 2 * 4096);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr - 4096));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2 + 4096));
  // Re-unprotecting the same range is idempotent too.
  test_shadow.MarkPagesUnprotected(addr, 2 * 4096);
  EXPECT_FALSE(test_shadow.PageIsProtected(addr - 4096));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2));
  EXPECT_FALSE(test_shadow.PageIsProtected(addr2 + 4096));
}
733 :
734 : namespace {
735 :
// A fixture for shadow walker tests.
class ShadowWalkerTest : public testing::Test {
 public:
  // A fresh shadow instance for each test case.
  TestShadow test_shadow;
};
741 :
742 : } // namespace
743 :
// Walks a shadow region containing three sibling (non-nested) blocks, the
// last of them quarantined, and checks that both the non-recursive and the
// recursive walker report all three at nesting depth 0.
TEST_F(ShadowWalkerTest, WalksNonNestedBlocks) {
  BlockLayout l = {};
  EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 7, 0, 0, &l));

  // Three identical blocks, with a kShadowRatio gap after the first one.
  size_t data_size = l.block_size * 3 + kShadowRatio;
  uint8_t* data = new uint8_t[data_size];
  uint8_t* data0 = data;
  uint8_t* data1 = data0 + l.block_size + kShadowRatio;
  uint8_t* data2 = data1 + l.block_size;

  BlockInfo i0 = {}, i1 = {}, i2 = {};
  BlockInitialize(l, data0, false, &i0);
  BlockInitialize(l, data1, false, &i1);
  BlockInitialize(l, data2, false, &i2);

  test_shadow.PoisonAllocatedBlock(i0);
  test_shadow.PoisonAllocatedBlock(i1);
  test_shadow.PoisonAllocatedBlock(i2);

  // Quarantine the last block; the walker must still report it.
  i2.header->state = QUARANTINED_BLOCK;
  test_shadow.MarkAsFreed(i2.body, i2.body_size);

  // Do a non-recursive walk through the shadow. The depth is -1 before the
  // first Next() and after the walk is exhausted.
  BlockInfo i = {};
  ShadowWalker w0(&test_shadow, false, data, data + data_size);
  EXPECT_EQ(-1, w0.nesting_depth());
  EXPECT_TRUE(w0.Next(&i));
  EXPECT_EQ(0, w0.nesting_depth());
  EXPECT_TRUE(w0.Next(&i));
  EXPECT_EQ(0, w0.nesting_depth());
  EXPECT_TRUE(w0.Next(&i));
  EXPECT_EQ(0, w0.nesting_depth());
  EXPECT_FALSE(w0.Next(&i));
  EXPECT_EQ(-1, w0.nesting_depth());

  // Walk recursively through the shadow and expect the same results,
  // checking that each visited block matches the one laid out above.
  ShadowWalker w1(&test_shadow, true, data, data + data_size);
  EXPECT_EQ(-1, w1.nesting_depth());
  EXPECT_TRUE(w1.Next(&i));
  EXPECT_EQ(0, w1.nesting_depth());
  EXPECT_EQ(0, ::memcmp(&i, &i0, sizeof(i)));
  EXPECT_TRUE(w1.Next(&i));
  EXPECT_EQ(0, w1.nesting_depth());
  EXPECT_EQ(0, ::memcmp(&i, &i1, sizeof(i)));
  EXPECT_TRUE(w1.Next(&i));
  EXPECT_EQ(0, w1.nesting_depth());
  EXPECT_EQ(0, ::memcmp(&i, &i2, sizeof(i)));
  EXPECT_FALSE(w1.Next(&i));
  EXPECT_EQ(-1, w1.nesting_depth());

  test_shadow.Unpoison(data, data_size);
  delete [] data;
}
797 :
798 E : TEST_F(ShadowWalkerTest, WalksNestedBlocks) {
    : // Block tree under test (nesting depth in parentheses):
    : //   b0 (0) contains b00 (1) and b01 (1), with a kShadowRatio gap after b00.
    : //   b1 (0) contains b10 (1), which in turn contains b100 (2).
    : //   b2 (0) is a leaf, placed kShadowRatio bytes after b1.
    : // Layouts are planned inside-out so each parent's body is sized to hold
    : // its children.
799 E : BlockLayout b0 = {}, b1 = {}, b2 = {}, b00 = {}, b01 = {}, b10 = {},
800 E : b100 = {};
801 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 15, 30, 30, &b00));
802 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 7, 0, 0, &b01));
803 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio,
804 E : b00.block_size + b01.block_size + kShadowRatio, 0, 0, &b0));
805 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 7, 0, 0, &b100));
806 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, b100.block_size, 0, 0,
807 E : &b10));
808 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, b10.block_size, 0, 0,
809 E : &b1));
810 E : EXPECT_TRUE(BlockPlanLayout(kShadowRatio, kShadowRatio, 100, 0, 0, &b2));
811 :
812 E : size_t data_size = b0.block_size + b1.block_size + kShadowRatio +
813 : b2.block_size;
814 E : uint8_t* data = new uint8_t[data_size];
815 :
816 : // Initialize the depth 0 blocks.
817 E : uint8_t* d0 = data;
818 E : uint8_t* d1 = d0 + b0.block_size;
819 E : uint8_t* d2 = d1 + b1.block_size + kShadowRatio;
820 E : BlockInfo i0 = {}, i1 = {}, i2 = {};
821 E : BlockInitialize(b0, d0, false, &i0);
822 E : BlockInitialize(b1, d1, false, &i1);
823 E : BlockInitialize(b2, d2, false, &i2);
824 E : test_shadow.PoisonAllocatedBlock(i0);
825 E : test_shadow.PoisonAllocatedBlock(i1);
826 E : test_shadow.PoisonAllocatedBlock(i2);
827 :
828 : // Initialize depth 1 blocks.
    : // Nested blocks live inside their parent's body (RawBody) and are
    : // initialized with is_nested == true.
829 E : uint8_t* d00 = i0.RawBody();
830 E : uint8_t* d01 = d00 + b00.block_size + kShadowRatio;
831 E : uint8_t* d10 = i1.RawBody();
832 E : BlockInfo i00 = {}, i01 = {}, i10 = {};
833 E : BlockInitialize(b00, d00, true, &i00);
834 E : BlockInitialize(b01, d01, true, &i01);
835 E : BlockInitialize(b10, d10, true, &i10);
836 E : test_shadow.PoisonAllocatedBlock(i00);
837 E : test_shadow.PoisonAllocatedBlock(i01);
838 E : test_shadow.PoisonAllocatedBlock(i10);
839 :
840 : // Initialize depth 2 blocks.
    : // The deepest block is also flood-quarantined and freed so the walker is
    : // exercised against a non-allocated nested block state.
841 E : uint8_t* d100 = i10.RawBody();
842 E : BlockInfo i100 = {};
843 E : BlockInitialize(b100, d100, true, &i100);
844 E : test_shadow.PoisonAllocatedBlock(i100);
845 E : i100.header->state = QUARANTINED_FLOODED_BLOCK;
846 E : test_shadow.MarkAsFreed(i100.body, i100.body_size);
847 :
848 : // Do a non-recursive walk through the shadow.
    : // Only the three depth-0 blocks (i0, i1, i2) are reported; nested blocks
    : // are skipped over.
849 E : BlockInfo i = {};
850 E : ShadowWalker w0(&test_shadow, false, data, data + data_size);
851 E : EXPECT_EQ(-1, w0.nesting_depth());
852 E : EXPECT_TRUE(w0.Next(&i));
853 E : EXPECT_EQ(0, w0.nesting_depth());
854 E : EXPECT_EQ(0, ::memcmp(&i, &i0, sizeof(i)));
855 E : EXPECT_TRUE(w0.Next(&i));
856 E : EXPECT_EQ(0, w0.nesting_depth());
857 E : EXPECT_EQ(0, ::memcmp(&i, &i1, sizeof(i)));
858 E : EXPECT_TRUE(w0.Next(&i));
859 E : EXPECT_EQ(0, w0.nesting_depth());
860 E : EXPECT_EQ(0, ::memcmp(&i, &i2, sizeof(i)));
861 E : EXPECT_FALSE(w0.Next(&i));
862 E : EXPECT_EQ(-1, w0.nesting_depth());
863 :
864 : // Walk recursively through the shadow.
    : // Expected depth-first visit order with reported depths:
    : //   i0 (0), i00 (1), i01 (1), i1 (0), i10 (1), i100 (2), i2 (0).
865 E : ShadowWalker w1(&test_shadow, true, data, data + data_size);
866 E : EXPECT_EQ(-1, w1.nesting_depth());
867 E : EXPECT_TRUE(w1.Next(&i));
868 E : EXPECT_EQ(0, w1.nesting_depth());
869 E : EXPECT_EQ(0, ::memcmp(&i, &i0, sizeof(i)));
870 E : EXPECT_TRUE(w1.Next(&i));
871 E : EXPECT_EQ(1, w1.nesting_depth());
872 E : EXPECT_EQ(0, ::memcmp(&i, &i00, sizeof(i)));
873 E : EXPECT_TRUE(w1.Next(&i));
874 E : EXPECT_EQ(1, w1.nesting_depth());
875 E : EXPECT_EQ(0, ::memcmp(&i, &i01, sizeof(i)));
876 E : EXPECT_TRUE(w1.Next(&i));
877 E : EXPECT_EQ(0, w1.nesting_depth());
878 E : EXPECT_EQ(0, ::memcmp(&i, &i1, sizeof(i)));
879 E : EXPECT_TRUE(w1.Next(&i));
880 E : EXPECT_EQ(1, w1.nesting_depth());
881 E : EXPECT_EQ(0, ::memcmp(&i, &i10, sizeof(i)));
882 E : EXPECT_TRUE(w1.Next(&i));
883 E : EXPECT_EQ(2, w1.nesting_depth());
884 E : EXPECT_EQ(0, ::memcmp(&i, &i100, sizeof(i)));
885 E : EXPECT_TRUE(w1.Next(&i));
886 E : EXPECT_EQ(0, w1.nesting_depth());
887 E : EXPECT_EQ(0, ::memcmp(&i, &i2, sizeof(i)));
888 E : EXPECT_FALSE(w1.Next(&i));
889 E : EXPECT_EQ(-1, w1.nesting_depth());
890 :
    : // Unpoison before freeing so the shadow is clean for subsequent tests.
891 E : test_shadow.Unpoison(data, data_size);
892 E : delete [] data;
893 E : }
894 :
895 : } // namespace asan
896 : } // namespace agent
|