// arena_test.cc
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
//
// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.
  9. #include "memory/arena.h"
  10. #include "test_util/testharness.h"
  11. #include "util/random.h"
namespace ROCKSDB_NAMESPACE {

namespace {
// 2 MiB — a typical huge page size; used to exercise Arena's huge-page path.
const size_t kHugePageSize = 2 * 1024 * 1024;
}  // namespace
// Empty fixture: Arena tests need no shared setup/teardown.
class ArenaTest : public testing::Test {};
  17. TEST_F(ArenaTest, Empty) { Arena arena0; }
  18. namespace {
  19. bool CheckMemoryAllocated(size_t allocated, size_t expected) {
  20. // The value returned by Arena::MemoryAllocatedBytes() may be greater than
  21. // the requested memory. We choose a somewhat arbitrary upper bound of
  22. // max_expected = expected * 1.1 to detect critical overallocation.
  23. size_t max_expected = expected + expected / 10;
  24. return allocated >= expected && allocated <= max_expected;
  25. }
  26. void MemoryAllocatedBytesTest(size_t huge_page_size) {
  27. const int N = 17;
  28. size_t req_sz; // requested size
  29. size_t bsz = 32 * 1024; // block size
  30. size_t expected_memory_allocated;
  31. Arena arena(bsz, nullptr, huge_page_size);
  32. // requested size > quarter of a block:
  33. // allocate requested size separately
  34. req_sz = 12 * 1024;
  35. for (int i = 0; i < N; i++) {
  36. arena.Allocate(req_sz);
  37. }
  38. expected_memory_allocated = req_sz * N + Arena::kInlineSize;
  39. ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
  40. expected_memory_allocated);
  41. arena.Allocate(Arena::kInlineSize - 1);
  42. // requested size < quarter of a block:
  43. // allocate a block with the default size, then try to use unused part
  44. // of the block. So one new block will be allocated for the first
  45. // Allocate(99) call. All the remaining calls won't lead to new allocation.
  46. req_sz = 99;
  47. for (int i = 0; i < N; i++) {
  48. arena.Allocate(req_sz);
  49. }
  50. if (huge_page_size) {
  51. ASSERT_TRUE(
  52. CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
  53. expected_memory_allocated + bsz) ||
  54. CheckMemoryAllocated(arena.MemoryAllocatedBytes(),
  55. expected_memory_allocated + huge_page_size));
  56. } else {
  57. expected_memory_allocated += bsz;
  58. ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
  59. expected_memory_allocated);
  60. }
  61. // requested size > size of a block:
  62. // allocate requested size separately
  63. expected_memory_allocated = arena.MemoryAllocatedBytes();
  64. req_sz = 8 * 1024 * 1024;
  65. for (int i = 0; i < N; i++) {
  66. arena.Allocate(req_sz);
  67. }
  68. expected_memory_allocated += req_sz * N;
  69. ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
  70. expected_memory_allocated);
  71. }
  72. // Make sure we didn't count the allocate but not used memory space in
  73. // Arena::ApproximateMemoryUsage()
  74. static void ApproximateMemoryUsageTest(size_t huge_page_size) {
  75. const size_t kBlockSize = 4096;
  76. const size_t kEntrySize = kBlockSize / 8;
  77. const size_t kZero = 0;
  78. Arena arena(kBlockSize, nullptr, huge_page_size);
  79. ASSERT_EQ(kZero, arena.ApproximateMemoryUsage());
  80. // allocate inline bytes
  81. const size_t kAlignUnit = alignof(max_align_t);
  82. EXPECT_TRUE(arena.IsInInlineBlock());
  83. arena.AllocateAligned(kAlignUnit);
  84. EXPECT_TRUE(arena.IsInInlineBlock());
  85. arena.AllocateAligned(Arena::kInlineSize / 2 - (2 * kAlignUnit));
  86. EXPECT_TRUE(arena.IsInInlineBlock());
  87. arena.AllocateAligned(Arena::kInlineSize / 2);
  88. EXPECT_TRUE(arena.IsInInlineBlock());
  89. ASSERT_EQ(arena.ApproximateMemoryUsage(), Arena::kInlineSize - kAlignUnit);
  90. ASSERT_PRED2(CheckMemoryAllocated, arena.MemoryAllocatedBytes(),
  91. Arena::kInlineSize);
  92. auto num_blocks = kBlockSize / kEntrySize;
  93. // first allocation
  94. arena.AllocateAligned(kEntrySize);
  95. EXPECT_FALSE(arena.IsInInlineBlock());
  96. auto mem_usage = arena.MemoryAllocatedBytes();
  97. if (huge_page_size) {
  98. ASSERT_TRUE(
  99. CheckMemoryAllocated(mem_usage, kBlockSize + Arena::kInlineSize) ||
  100. CheckMemoryAllocated(mem_usage, huge_page_size + Arena::kInlineSize));
  101. } else {
  102. ASSERT_PRED2(CheckMemoryAllocated, mem_usage,
  103. kBlockSize + Arena::kInlineSize);
  104. }
  105. auto usage = arena.ApproximateMemoryUsage();
  106. ASSERT_LT(usage, mem_usage);
  107. for (size_t i = 1; i < num_blocks; ++i) {
  108. arena.AllocateAligned(kEntrySize);
  109. ASSERT_EQ(mem_usage, arena.MemoryAllocatedBytes());
  110. ASSERT_EQ(arena.ApproximateMemoryUsage(), usage + kEntrySize);
  111. EXPECT_FALSE(arena.IsInInlineBlock());
  112. usage = arena.ApproximateMemoryUsage();
  113. }
  114. if (huge_page_size) {
  115. ASSERT_TRUE(usage > mem_usage ||
  116. usage + huge_page_size - kBlockSize == mem_usage);
  117. } else {
  118. ASSERT_GT(usage, mem_usage);
  119. }
  120. }
  121. static void SimpleTest(size_t huge_page_size) {
  122. std::vector<std::pair<size_t, char*>> allocated;
  123. Arena arena(Arena::kMinBlockSize, nullptr, huge_page_size);
  124. const int N = 100000;
  125. size_t bytes = 0;
  126. Random rnd(301);
  127. for (int i = 0; i < N; i++) {
  128. size_t s;
  129. if (i % (N / 10) == 0) {
  130. s = i;
  131. } else {
  132. s = rnd.OneIn(4000)
  133. ? rnd.Uniform(6000)
  134. : (rnd.OneIn(10) ? rnd.Uniform(100) : rnd.Uniform(20));
  135. }
  136. if (s == 0) {
  137. // Our arena disallows size 0 allocations.
  138. s = 1;
  139. }
  140. char* r;
  141. if (rnd.OneIn(10)) {
  142. r = arena.AllocateAligned(s);
  143. } else {
  144. r = arena.Allocate(s);
  145. }
  146. for (unsigned int b = 0; b < s; b++) {
  147. // Fill the "i"th allocation with a known bit pattern
  148. r[b] = i % 256;
  149. }
  150. bytes += s;
  151. allocated.push_back(std::make_pair(s, r));
  152. ASSERT_GE(arena.ApproximateMemoryUsage(), bytes);
  153. if (i > N / 10) {
  154. ASSERT_LE(arena.ApproximateMemoryUsage(), bytes * 1.10);
  155. }
  156. }
  157. for (unsigned int i = 0; i < allocated.size(); i++) {
  158. size_t num_bytes = allocated[i].first;
  159. const char* p = allocated[i].second;
  160. for (unsigned int b = 0; b < num_bytes; b++) {
  161. // Check the "i"th allocation for the known bit pattern
  162. ASSERT_EQ(int(p[b]) & 0xff, (int)(i % 256));
  163. }
  164. }
  165. }
  166. } // namespace
// Run the MemoryAllocatedBytes checks without and with huge pages.
TEST_F(ArenaTest, MemoryAllocatedBytes) {
  MemoryAllocatedBytesTest(0);
  MemoryAllocatedBytesTest(kHugePageSize);
}
// Run the ApproximateMemoryUsage checks without and with huge pages.
TEST_F(ArenaTest, ApproximateMemoryUsage) {
  ApproximateMemoryUsageTest(0);
  ApproximateMemoryUsageTest(kHugePageSize);
}
// Run the randomized smoke test without and with huge pages.
TEST_F(ArenaTest, Simple) {
  SimpleTest(0);
  SimpleTest(kHugePageSize);
}
  179. } // namespace ROCKSDB_NAMESPACE
// gtest entry point: run all registered Arena tests.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}