Mirror of https://github.com/facebook/rocksdb.git, synced 2024-11-27 02:44:18 +00:00
Commit 36a5f8ed7f
- Replace raw slice comparison with a call to the user comparator. Added a test for custom comparators.
- Fix end-of-namespace comments.
- Fixed a bug in picking inputs for a level-0 compaction. When finding overlapping files, the covered range may expand as files are added to the input set. We now correctly expand the range when this happens instead of continuing to use the old range. For example, suppose L0 contains files with the following ranges:

      F1: a .. d
      F2: c .. g
      F3: f .. j

  and the initial compaction target is F3. We used to search for the range f..j, which yielded {F2, F3}. However, we now expand the range as soon as another file is added. In this case, when F2 is added, we expand the range to c..j and restart the search. That picks up file F1 as well. This change fixes a bug related to deleted keys showing up incorrectly after a compaction, as described in Issue 44.

(Sync with upstream @25072954)
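The level-0 fix described above is essentially a fixed-point search: whenever a newly added input file widens the covered key range, the search restarts with the wider range so that files skipped earlier are reconsidered. The sketch below illustrates only that idea; it is not the actual LevelDB/RocksDB compaction-picking code, and FileRange, OverlappingLevel0Files, and the plain std::string comparisons (standing in for the user comparator) are illustrative assumptions.

#include <string>
#include <vector>

// Illustrative stand-in for a level-0 file's user-key range.
struct FileRange {
  std::string smallest;
  std::string largest;
};

// Collect every level-0 file overlapping [begin, end]. Whenever an added
// file widens the covered range, clear the result and restart so that
// files skipped earlier are reconsidered against the wider range. Each
// restart strictly widens the range, so the loop terminates.
std::vector<FileRange> OverlappingLevel0Files(const std::vector<FileRange>& files,
                                              std::string begin,
                                              std::string end) {
  std::vector<FileRange> inputs;
  size_t i = 0;
  while (i < files.size()) {
    const FileRange& f = files[i];
    if (f.largest < begin || f.smallest > end) {
      ++i;  // No overlap with the current range; skip (for now).
      continue;
    }
    inputs.push_back(f);
    bool expanded = false;
    if (f.smallest < begin) { begin = f.smallest; expanded = true; }
    if (f.largest > end) { end = f.largest; expanded = true; }
    if (expanded) {
      inputs.clear();  // Range grew: restart from the first file.
      i = 0;
    } else {
      ++i;
    }
  }
  return inputs;
}

For the ranges in the message above (F1: a .. d, F2: c .. g, F3: f .. j) and an initial target of f .. j, adding F2 widens the range to c .. j and triggers a restart, which then picks up F1 and widens the range to a .. j, so all three files end up in the compaction.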
146 lines
4.7 KiB
C++
// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.

#include "db/memtable.h"
#include "db/dbformat.h"
#include "leveldb/comparator.h"
#include "leveldb/env.h"
#include "leveldb/iterator.h"
#include "util/coding.h"

namespace leveldb {

static Slice GetLengthPrefixedSlice(const char* data) {
  uint32_t len;
  const char* p = data;
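  // A varint32 occupies at most 5 bytes, so p + 5 bounds the decode.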
  p = GetVarint32Ptr(p, p + 5, &len);  // +5: we assume "p" is not corrupted
  return Slice(p, len);
}

MemTable::MemTable(const InternalKeyComparator& cmp)
    : comparator_(cmp),
      refs_(0),
      table_(comparator_, &arena_) {
}

MemTable::~MemTable() {
  assert(refs_ == 0);
}

size_t MemTable::ApproximateMemoryUsage() { return arena_.MemoryUsage(); }

int MemTable::KeyComparator::operator()(const char* aptr, const char* bptr)
    const {
  // Internal keys are encoded as length-prefixed strings.
  Slice a = GetLengthPrefixedSlice(aptr);
  Slice b = GetLengthPrefixedSlice(bptr);
  return comparator.Compare(a, b);
}

// Encode a suitable internal key target for "target" and return it.
// Uses *scratch as scratch space, and the returned pointer will point
// into this scratch space.
static const char* EncodeKey(std::string* scratch, const Slice& target) {
  scratch->clear();
  PutVarint32(scratch, target.size());
  scratch->append(target.data(), target.size());
  return scratch->data();
}

class MemTableIterator: public Iterator {
 public:
  explicit MemTableIterator(MemTable::Table* table) : iter_(table) { }

  virtual bool Valid() const { return iter_.Valid(); }
  virtual void Seek(const Slice& k) { iter_.Seek(EncodeKey(&tmp_, k)); }
  virtual void SeekToFirst() { iter_.SeekToFirst(); }
  virtual void SeekToLast() { iter_.SeekToLast(); }
  virtual void Next() { iter_.Next(); }
  virtual void Prev() { iter_.Prev(); }
  virtual Slice key() const { return GetLengthPrefixedSlice(iter_.key()); }
  virtual Slice value() const {
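    // The value is stored immediately after the length-prefixed key
    // within the same entry.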
    Slice key_slice = GetLengthPrefixedSlice(iter_.key());
    return GetLengthPrefixedSlice(key_slice.data() + key_slice.size());
  }

  virtual Status status() const { return Status::OK(); }

 private:
  MemTable::Table::Iterator iter_;
  std::string tmp_;  // For passing to EncodeKey

  // No copying allowed
  MemTableIterator(const MemTableIterator&);
  void operator=(const MemTableIterator&);
};

Iterator* MemTable::NewIterator() {
  return new MemTableIterator(&table_);
}

void MemTable::Add(SequenceNumber s, ValueType type,
                   const Slice& key,
                   const Slice& value) {
  // Format of an entry is concatenation of:
  //  key_size     : varint32 of internal_key.size()
  //  key bytes    : char[internal_key.size()]
  //  value_size   : varint32 of value.size()
  //  value bytes  : char[value.size()]
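  // For example, Add(7, kTypeValue, "bar", "v") produces the 14-byte entry
  //   0x0b | 'b' 'a' 'r' | 01 07 00 00 00 00 00 00 | 0x01 | 'v'
  // where kTypeValue is 0x1 and the 8-byte tag (7 << 8) | 1 is little-endian.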
  size_t key_size = key.size();
  size_t val_size = value.size();
  size_t internal_key_size = key_size + 8;
  const size_t encoded_len =
      VarintLength(internal_key_size) + internal_key_size +
      VarintLength(val_size) + val_size;
  char* buf = arena_.Allocate(encoded_len);
  char* p = EncodeVarint32(buf, internal_key_size);
  memcpy(p, key.data(), key_size);
  p += key_size;
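  // The tag packs the sequence number into the high 56 bits and the value
  // type into the low 8 bits.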
  EncodeFixed64(p, (s << 8) | type);
  p += 8;
  p = EncodeVarint32(p, val_size);
  memcpy(p, value.data(), val_size);
  assert((p + val_size) - buf == encoded_len);
  table_.Insert(buf);
}

bool MemTable::Get(const LookupKey& key, std::string* value, Status* s) {
  Slice memkey = key.memtable_key();
  Table::Iterator iter(&table_);
  iter.Seek(memkey.data());
  if (iter.Valid()) {
    // entry format is:
    //    klength  varint32
    //    userkey  char[klength]
    //    tag      uint64
    //    vlength  varint32
    //    value    char[vlength]
    // Check that it belongs to the same user key.  We do not check the
    // sequence number since the Seek() call above should have skipped
    // all entries with overly large sequence numbers.
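    // (Entries with the same user key are ordered by decreasing sequence
    // number, so Seek() lands on the newest entry visible at the lookup
    // sequence.)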
    const char* entry = iter.key();
    uint32_t key_length;
    const char* key_ptr = GetVarint32Ptr(entry, entry+5, &key_length);
    if (comparator_.comparator.user_comparator()->Compare(
            Slice(key_ptr, key_length - 8),
            key.user_key()) == 0) {
      // Correct user key
      const uint64_t tag = DecodeFixed64(key_ptr + key_length - 8);
      switch (static_cast<ValueType>(tag & 0xff)) {
        case kTypeValue: {
          Slice v = GetLengthPrefixedSlice(key_ptr + key_length);
          value->assign(v.data(), v.size());
          return true;
        }
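        // A deletion tombstone: report NotFound, but return true so the
        // caller stops looking for older versions of this key.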
        case kTypeDeletion:
          *s = Status::NotFound(Slice());
          return true;
      }
    }
  }
  return false;
}

}  // namespace leveldb