vectorrep.cc 8.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301
  1. // Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
  2. // This source code is licensed under both the GPLv2 (found in the
  3. // COPYING file in the root directory) and Apache 2.0 License
  4. // (found in the LICENSE.Apache file in the root directory).
  5. //
  6. #ifndef ROCKSDB_LITE
  7. #include "rocksdb/memtablerep.h"
  8. #include <unordered_set>
  9. #include <set>
  10. #include <memory>
  11. #include <algorithm>
  12. #include <type_traits>
  13. #include "db/memtable.h"
  14. #include "memory/arena.h"
  15. #include "memtable/stl_wrappers.h"
  16. #include "port/port.h"
  17. #include "util/mutexlock.h"
  18. namespace ROCKSDB_NAMESPACE {
  19. namespace {
  20. using namespace stl_wrappers;
// A MemTableRep backed by an unsorted std::vector<const char*>. Inserts
// append under a write lock; the vector is sorted lazily (with the supplied
// KeyComparator) the first time an iterator needs ordered access, after the
// memtable has been marked read-only.
class VectorRep : public MemTableRep {
 public:
  // `count` is the initial capacity reserved in the backing vector.
  VectorRep(const KeyComparator& compare, Allocator* allocator, size_t count);

  // Insert key into the collection. (The caller will pack key and value into a
  // single buffer and pass that in as the parameter to Insert)
  // REQUIRES: nothing that compares equal to key is currently in the
  // collection.
  void Insert(KeyHandle handle) override;

  // Returns true iff an entry that compares equal to key is in the collection.
  bool Contains(const char* key) const override;

  void MarkReadOnly() override;

  size_t ApproximateMemoryUsage() override;

  void Get(const LookupKey& k, void* callback_args,
           bool (*callback_func)(void* arg, const char* entry)) override;

  ~VectorRep() override {}

  // Iterator over a snapshot of the rep. When `vrep_` is non-null the
  // iterator shares the rep's bucket (immutable case); when null it owns a
  // private copy and sorts it independently.
  class Iterator : public MemTableRep::Iterator {
    class VectorRep* vrep_;
    std::shared_ptr<std::vector<const char*>> bucket_;
    // mutable: positioned/reset from const members (DoSort, Valid).
    std::vector<const char*>::const_iterator mutable cit_;
    const KeyComparator& compare_;
    std::string tmp_;  // For passing to EncodeKey
    // mutable: set from const DoSort() once this iterator's view is sorted.
    bool mutable sorted_;
    void DoSort() const;

   public:
    explicit Iterator(class VectorRep* vrep,
                      std::shared_ptr<std::vector<const char*>> bucket,
                      const KeyComparator& compare);

    // Initialize an iterator over the specified collection.
    // The returned iterator is not valid.
    // explicit Iterator(const MemTableRep* collection);
    ~Iterator() override{};

    // Returns true iff the iterator is positioned at a valid node.
    bool Valid() const override;

    // Returns the key at the current position.
    // REQUIRES: Valid()
    const char* key() const override;

    // Advances to the next position.
    // REQUIRES: Valid()
    void Next() override;

    // Advances to the previous position.
    // REQUIRES: Valid()
    void Prev() override;

    // Advance to the first entry with a key >= target
    void Seek(const Slice& user_key, const char* memtable_key) override;

    // Advance to the first entry with a key <= target
    void SeekForPrev(const Slice& user_key, const char* memtable_key) override;

    // Position at the first entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToFirst() override;

    // Position at the last entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToLast() override;
  };

  // Return an iterator over the keys in this representation.
  MemTableRep::Iterator* GetIterator(Arena* arena) override;

 private:
  friend class Iterator;
  typedef std::vector<const char*> Bucket;
  std::shared_ptr<Bucket> bucket_;
  mutable port::RWMutex rwlock_;
  // Set once by MarkReadOnly(); guarded by rwlock_.
  bool immutable_;
  // True after the shared bucket has been sorted; guarded by rwlock_.
  bool sorted_;
  const KeyComparator& compare_;
};
  85. void VectorRep::Insert(KeyHandle handle) {
  86. auto* key = static_cast<char*>(handle);
  87. WriteLock l(&rwlock_);
  88. assert(!immutable_);
  89. bucket_->push_back(key);
  90. }
  91. // Returns true iff an entry that compares equal to key is in the collection.
  92. bool VectorRep::Contains(const char* key) const {
  93. ReadLock l(&rwlock_);
  94. return std::find(bucket_->begin(), bucket_->end(), key) != bucket_->end();
  95. }
// Flips the rep into read-only mode; after this, Insert() would trip its
// assert. Taken under the write lock so readers in Get()/GetIterator()
// observe the flag consistently.
void VectorRep::MarkReadOnly() {
  WriteLock l(&rwlock_);
  immutable_ = true;
}
  100. size_t VectorRep::ApproximateMemoryUsage() {
  101. return
  102. sizeof(bucket_) + sizeof(*bucket_) +
  103. bucket_->size() *
  104. sizeof(
  105. std::remove_reference<decltype(*bucket_)>::type::value_type
  106. );
  107. }
  108. VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator,
  109. size_t count)
  110. : MemTableRep(allocator),
  111. bucket_(new Bucket()),
  112. immutable_(false),
  113. sorted_(false),
  114. compare_(compare) {
  115. bucket_.get()->reserve(count);
  116. }
  117. VectorRep::Iterator::Iterator(class VectorRep* vrep,
  118. std::shared_ptr<std::vector<const char*>> bucket,
  119. const KeyComparator& compare)
  120. : vrep_(vrep),
  121. bucket_(bucket),
  122. cit_(bucket_->end()),
  123. compare_(compare),
  124. sorted_(false) { }
// Lazily sorts this iterator's view of the bucket, at most once per
// iterator. Two cases:
//   * vrep_ != nullptr: the bucket is shared with an immutable rep, so the
//     sort must happen under the rep's write lock, and vrep_->sorted_
//     ensures the shared bucket is only sorted by the first iterator.
//   * vrep_ == nullptr: this iterator owns a private copy of the bucket
//     (mutable-memtable snapshot) and can sort without locking.
// Also resets cit_ to begin() whenever a sort is actually performed, since
// sorting invalidates the previous position.
void VectorRep::Iterator::DoSort() const {
  // vrep is non-null means that we are working on an immutable memtable
  if (!sorted_ && vrep_ != nullptr) {
    WriteLock l(&vrep_->rwlock_);
    // Re-check under the lock: another iterator may have sorted already.
    if (!vrep_->sorted_) {
      std::sort(bucket_->begin(), bucket_->end(), Compare(compare_));
      cit_ = bucket_->begin();
      vrep_->sorted_ = true;
    }
    sorted_ = true;
  }
  // Private-copy case: no shared state, sort without taking any lock.
  if (!sorted_) {
    std::sort(bucket_->begin(), bucket_->end(), Compare(compare_));
    cit_ = bucket_->begin();
    sorted_ = true;
  }
  assert(sorted_);
  assert(vrep_ == nullptr || vrep_->sorted_);
}
// Returns true iff the iterator is positioned at a valid node.
// Triggers the lazy sort first, since an unsorted view has no meaningful
// position (DoSort resets cit_ to begin() when it actually sorts).
bool VectorRep::Iterator::Valid() const {
  DoSort();
  return cit_ != bucket_->end();
}
// Returns the key at the current position.
// REQUIRES: Valid()
// The sorted_ assert enforces that Valid()/Seek*/DoSort ran before any
// dereference of cit_.
const char* VectorRep::Iterator::key() const {
  assert(sorted_);
  return *cit_;
}
  155. // Advances to the next position.
  156. // REQUIRES: Valid()
  157. void VectorRep::Iterator::Next() {
  158. assert(sorted_);
  159. if (cit_ == bucket_->end()) {
  160. return;
  161. }
  162. ++cit_;
  163. }
  164. // Advances to the previous position.
  165. // REQUIRES: Valid()
  166. void VectorRep::Iterator::Prev() {
  167. assert(sorted_);
  168. if (cit_ == bucket_->begin()) {
  169. // If you try to go back from the first element, the iterator should be
  170. // invalidated. So we set it to past-the-end. This means that you can
  171. // treat the container circularly.
  172. cit_ = bucket_->end();
  173. } else {
  174. --cit_;
  175. }
  176. }
  177. // Advance to the first entry with a key >= target
  178. void VectorRep::Iterator::Seek(const Slice& user_key,
  179. const char* memtable_key) {
  180. DoSort();
  181. // Do binary search to find first value not less than the target
  182. const char* encoded_key =
  183. (memtable_key != nullptr) ? memtable_key : EncodeKey(&tmp_, user_key);
  184. cit_ = std::equal_range(bucket_->begin(),
  185. bucket_->end(),
  186. encoded_key,
  187. [this] (const char* a, const char* b) {
  188. return compare_(a, b) < 0;
  189. }).first;
  190. }
// Advance to the first entry with a key <= target
// Not implemented for the vector rep: reaching this in a debug build is a
// programming error (callers must not issue backward seeks on this rep).
void VectorRep::Iterator::SeekForPrev(const Slice& /*user_key*/,
                                      const char* /*memtable_key*/) {
  assert(false);
}
// Position at the first entry in collection.
// Final state of iterator is Valid() iff collection is not empty.
void VectorRep::Iterator::SeekToFirst() {
  // Sort first so begin() is the smallest key, not insertion order.
  DoSort();
  cit_ = bucket_->begin();
}
  202. // Position at the last entry in collection.
  203. // Final state of iterator is Valid() iff collection is not empty.
  204. void VectorRep::Iterator::SeekToLast() {
  205. DoSort();
  206. cit_ = bucket_->end();
  207. if (bucket_->size() != 0) {
  208. --cit_;
  209. }
  210. }
// Looks up `k`, invoking callback_func on successive entries from the first
// key >= target until the callback returns false.
// Snapshot strategy, decided under the read lock:
//   * immutable rep: share bucket_ with the iterator (vrep_ = this), so the
//     one-time sort in DoSort() is coordinated via the rep's lock/flag;
//   * mutable rep: copy the bucket so the iterator's private sort cannot
//     race with concurrent Insert() calls.
void VectorRep::Get(const LookupKey& k, void* callback_args,
                    bool (*callback_func)(void* arg, const char* entry)) {
  rwlock_.ReadLock();
  VectorRep* vector_rep;
  std::shared_ptr<Bucket> bucket;
  if (immutable_) {
    vector_rep = this;
  } else {
    vector_rep = nullptr;
    bucket.reset(new Bucket(*bucket_));  // make a copy
  }
  // immutable_ and the chosen bucket are both read while the lock is still
  // held; constructing the iterator only copies shared_ptrs.
  VectorRep::Iterator iter(vector_rep, immutable_ ? bucket_ : bucket, compare_);
  rwlock_.ReadUnlock();
  for (iter.Seek(k.user_key(), k.memtable_key().data());
       iter.Valid() && callback_func(callback_args, iter.key()); iter.Next()) {
  }
}
  228. MemTableRep::Iterator* VectorRep::GetIterator(Arena* arena) {
  229. char* mem = nullptr;
  230. if (arena != nullptr) {
  231. mem = arena->AllocateAligned(sizeof(Iterator));
  232. }
  233. ReadLock l(&rwlock_);
  234. // Do not sort here. The sorting would be done the first time
  235. // a Seek is performed on the iterator.
  236. if (immutable_) {
  237. if (arena == nullptr) {
  238. return new Iterator(this, bucket_, compare_);
  239. } else {
  240. return new (mem) Iterator(this, bucket_, compare_);
  241. }
  242. } else {
  243. std::shared_ptr<Bucket> tmp;
  244. tmp.reset(new Bucket(*bucket_)); // make a copy
  245. if (arena == nullptr) {
  246. return new Iterator(nullptr, tmp, compare_);
  247. } else {
  248. return new (mem) Iterator(nullptr, tmp, compare_);
  249. }
  250. }
  251. }
  252. } // anon namespace
// Factory hook: builds a VectorRep with the factory's configured reserve
// count. The SliceTransform and Logger parameters are unused by this rep.
MemTableRep* VectorRepFactory::CreateMemTableRep(
    const MemTableRep::KeyComparator& compare, Allocator* allocator,
    const SliceTransform*, Logger* /*logger*/) {
  return new VectorRep(compare, allocator, count_);
}
  258. } // namespace ROCKSDB_NAMESPACE
  259. #endif // ROCKSDB_LITE