@@ -81,10 +81,9 @@ class HashSkipListRep : public MemTableRep {
 
   class Iterator : public MemTableRep::Iterator {
    public:
-    explicit Iterator(Bucket* list, bool own_list = true)
-        : list_(list),
-          iter_(list),
-          own_list_(own_list) {}
+    explicit Iterator(Bucket* list, bool own_list = true,
+                      Arena* arena = nullptr)
+        : list_(list), iter_(list), own_list_(own_list), arena_(arena) {}
 
     virtual ~Iterator() {
       // if we own the list, we should also delete it
@@ -163,6 +162,7 @@ class HashSkipListRep : public MemTableRep {
     // here we track if we own list_. If we own it, we are also
     // responsible for it's cleaning. This is a poor man's shared_ptr
     bool own_list_;
+    std::unique_ptr<Arena> arena_;
     std::string tmp_; // For passing to EncodeKey
   };
 
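A minimal sketch of the ownership pattern the new arena_ member sets up, with the skip-list internals stripped out. The class below is illustrative, not the real Iterator, and the Arena and Bucket definitions are placeholder stand-ins for the types used in this file. The point is the destruction order: the body of ~Iterator() deletes the owned list first, and only afterwards does the unique_ptr member release the arena that backed the list's nodes.

#include <memory>

// Illustrative stand-ins for the real types in hash_skiplist_rep.cc.
class Arena {};
class Bucket {};

// Simplified iterator: a plain delete of the owned list plus a
// std::unique_ptr<Arena> member is enough for cleanup, because the
// destructor body runs before member destructors. The list is torn down
// while the arena backing its nodes is still alive; the arena is freed
// immediately after.
class OwningIterSketch {
 public:
  OwningIterSketch(Bucket* list, bool own_list, Arena* arena)
      : list_(list), own_list_(own_list), arena_(arena) {}
  ~OwningIterSketch() {
    if (own_list_) {
      delete list_;  // runs first, while arena_ is still valid
    }
  }  // arena_ is destroyed here, releasing everything allocated from it

 private:
  Bucket* list_;
  bool own_list_;
  std::unique_ptr<Arena> arena_;
};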
@@ -289,7 +289,9 @@ void HashSkipListRep::Get(const LookupKey& k, void* callback_args,
 }
 
 MemTableRep::Iterator* HashSkipListRep::GetIterator() {
-  auto list = new Bucket(compare_, arena_);
+  // allocate a new arena of similar size to the one currently in use
+  Arena* new_arena = new Arena(arena_->BlockSize());
+  auto list = new Bucket(compare_, new_arena);
   for (size_t i = 0; i < bucket_size_; ++i) {
     auto bucket = GetBucket(i);
     if (bucket != nullptr) {
@@ -299,7 +301,7 @@ MemTableRep::Iterator* HashSkipListRep::GetIterator() {
       }
     }
   }
-  return new Iterator(list);
+  return new Iterator(list, true, new_arena);
 }
 
 MemTableRep::Iterator* HashSkipListRep::GetPrefixIterator(const Slice& prefix) {
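The net effect, seen from a hypothetical caller (this usage is a sketch and not part of the diff; rep is assumed to be a MemTableRep* backed by the hash-skiplist factory): GetIterator() now builds the merged skip list in a freshly allocated arena sized like the memtable's current one, and the returned iterator owns both the list and that arena, so deleting the iterator reclaims everything the call allocated.

#include <memory>
#include "rocksdb/memtablerep.h"  // MemTableRep and MemTableRep::Iterator

// Hypothetical caller-side sketch. SeekToFirst/Valid/Next/key are the
// standard MemTableRep::Iterator interface; key() returns the encoded
// memtable entry.
void ScanAll(rocksdb::MemTableRep* rep) {
  std::unique_ptr<rocksdb::MemTableRep::Iterator> iter(rep->GetIterator());
  for (iter->SeekToFirst(); iter->Valid(); iter->Next()) {
    const char* entry = iter->key();  // ... use the entry ...
    (void)entry;
  }
  // iter goes out of scope: ~Iterator() deletes the merged skip list, then
  // the unique_ptr<Arena> member frees the arena its nodes lived in.
}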