Update upper_bound_offset when reseek changes iterate_upper_bound dynamically (#11775)

Summary:
Update the logic in FilePrefetchBuffer so that `upper_bound_offset_` is refreshed during reseek. `iterate_upper_bound` can be changed dynamically between seeks, so FilePrefetchBuffer now exposes an API to update the bound when that happens.
Added a unit test to confirm the behavior.
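
For context, a hedged usage sketch of the scenario this change addresses (the DB path and keys are illustrative, not taken from this PR): an iterator is created with `auto_readahead_size` enabled, and the `Slice` that `iterate_upper_bound` points to is repointed between seeks, so the prefetch buffer has to pick up the new bound on reseek. This mirrors what the new unit test below exercises with and without readahead tuning.

```cpp
#include <cassert>
#include <memory>

#include "rocksdb/db.h"
#include "rocksdb/iterator.h"
#include "rocksdb/options.h"
#include "rocksdb/slice.h"

using namespace ROCKSDB_NAMESPACE;

int main() {
  DB* db = nullptr;
  Options options;
  options.create_if_missing = true;
  // Illustrative path only.
  assert(DB::Open(options, "/tmp/upper_bound_reseek_demo", &db).ok());

  ReadOptions ropts;
  ropts.auto_readahead_size = true;  // trim readahead at the upper bound
  Slice ub("my_key_uuu");
  ropts.iterate_upper_bound = &ub;   // the iterator keeps this pointer

  std::unique_ptr<Iterator> iter(db->NewIterator(ropts));
  for (iter->Seek("my_key_aaa"); iter->Valid(); iter->Next()) {
    // First scan: readahead is trimmed against "my_key_uuu".
  }

  // Repoint the bound through the same pointer and reseek the same iterator.
  // FilePrefetchBuffer must see the new bound instead of the stale one.
  ub = Slice("my_key_y");
  for (iter->Seek("my_key_v"); iter->Valid(); iter->Next()) {
    // Second scan: readahead is trimmed against "my_key_y".
  }

  iter.reset();
  delete db;
  return 0;
}
```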

Pull Request resolved: https://github.com/facebook/rocksdb/pull/11775

Test Plan:
- Check stress tests for any failures introduced by this diff.
- `make crash_test -j32` with `auto_readahead_size=1` passed locally.

Reviewed By: anand1976

Differential Revision: D48815177

Pulled By: akankshamahajan15

fbshipit-source-id: 5f44fbb3af06c86a1c38f139c5fa4543891837f4
Author: akankshamahajan, 2023-09-15 10:05:56 -07:00 (committed by Facebook GitHub Bot)
Commit: 1e2fd343bb, parent: e1fd348b92
4 changed files with 78 additions and 21 deletions


@@ -279,6 +279,11 @@ class FilePrefetchBuffer {
// Callback function passed to underlying FS in case of asynchronous reads.
void PrefetchAsyncCallback(const FSReadRequest& req, void* cb_arg);
void ResetUpperBoundOffset(uint64_t upper_bound_offset) {
upper_bound_offset_ = upper_bound_offset;
readahead_size_ = initial_auto_readahead_size_;
}
private:
// Calculates roundoff offset and length to be prefetched based on alignment
// and data present in buffer_. It also allocates new buffer or refit tail if
@@ -321,7 +326,6 @@ class FilePrefetchBuffer {
void ResetValues() {
num_file_reads_ = 1;
readahead_size_ = initial_auto_readahead_size_;
upper_bound_offset_ = 0;
}
// Called in case of implicit auto prefetching.
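
To make concrete how an upper bound interacts with readahead, here is a minimal standalone sketch of the trimming idea only; it is not the FilePrefetchBuffer implementation, and the function name is made up. Readahead length is clamped so prefetching never runs past the currently configured upper bound offset, which is why a stale bound after reseek would trim against the wrong limit.

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>

// Illustrative helper, not RocksDB code: clamp a readahead request so it does
// not extend past the current upper bound offset. In this sketch, 0 stands
// for "no bound configured".
uint64_t TrimmedReadaheadLen(uint64_t read_offset, uint64_t readahead_size,
                             uint64_t upper_bound_offset) {
  if (upper_bound_offset == 0) {
    return readahead_size;  // no bound configured, nothing to trim
  }
  if (read_offset >= upper_bound_offset) {
    return 0;  // the request starts at or beyond the bound
  }
  return std::min(readahead_size, upper_bound_offset - read_offset);
}

int main() {
  // An 8 KiB readahead starting at offset 1024 with a bound at 4096 is
  // trimmed down to 3072 bytes.
  assert(TrimmedReadaheadLen(1024, 8192, 4096) == 3072);
  return 0;
}
```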


@@ -2082,6 +2082,7 @@ TEST_P(PrefetchTest, IterReadAheadSizeWithUpperBound) {
int buff_count_with_tuning = 0, buff_count_without_tuning = 0;
int keys_with_tuning = 0, keys_without_tuning = 0;
int reseek_keys_with_tuning = 0, reseek_keys_without_tuning = 0;
buff_prefetch_count = 0;
SyncPoint::GetInstance()->SetCallBack(
@@ -2102,17 +2103,19 @@ TEST_P(PrefetchTest, IterReadAheadSizeWithUpperBound) {
ropts.async_io = true;
}
Slice ub = Slice("my_key_uuu");
ropts.iterate_upper_bound = &ub;
Slice seek_key = Slice("my_key_aaa");
// With tuning readahead_size.
{
ASSERT_OK(options.statistics->Reset());
Slice ub = Slice("my_key_uuu");
Slice* ub_ptr = &ub;
ropts.iterate_upper_bound = ub_ptr;
ropts.auto_readahead_size = true;
auto iter = std::unique_ptr<Iterator>(db_->NewIterator(ropts));
// Seek.
{
Slice seek_key = Slice("my_key_aaa");
iter->Seek(seek_key);
while (iter->Valid()) {
@@ -2120,20 +2123,44 @@ TEST_P(PrefetchTest, IterReadAheadSizeWithUpperBound) {
iter->Next();
}
uint64_t readhahead_trimmed =
uint64_t readahead_trimmed =
options.statistics->getAndResetTickerCount(READAHEAD_TRIMMED);
ASSERT_GT(readhahead_trimmed, 0);
ASSERT_GT(readahead_trimmed, 0);
buff_count_with_tuning = buff_prefetch_count;
}
// Reseek with new upper_bound_iterator.
{
ub = Slice("my_key_y");
Slice reseek_key = Slice("my_key_v");
iter->Seek(reseek_key);
while (iter->Valid()) {
iter->Next();
reseek_keys_with_tuning++;
}
uint64_t readahead_trimmed =
options.statistics->getAndResetTickerCount(READAHEAD_TRIMMED);
ASSERT_GT(readahead_trimmed, 0);
ASSERT_GT(reseek_keys_with_tuning, 0);
}
}
// Without tuning readahead_size
{
Slice ub = Slice("my_key_uuu");
Slice* ub_ptr = &ub;
ropts.iterate_upper_bound = ub_ptr;
buff_prefetch_count = 0;
ASSERT_OK(options.statistics->Reset());
ropts.auto_readahead_size = false;
auto iter = std::unique_ptr<Iterator>(db_->NewIterator(ropts));
// Seek.
{
Slice seek_key = Slice("my_key_aaa");
iter->Seek(seek_key);
while (iter->Valid()) {
@@ -2141,9 +2168,27 @@ TEST_P(PrefetchTest, IterReadAheadSizeWithUpperBound) {
iter->Next();
}
buff_count_without_tuning = buff_prefetch_count;
uint64_t readhahead_trimmed =
uint64_t readahead_trimmed =
options.statistics->getAndResetTickerCount(READAHEAD_TRIMMED);
ASSERT_EQ(readhahead_trimmed, 0);
ASSERT_EQ(readahead_trimmed, 0);
}
// Reseek with new upper_bound_iterator.
{
ub = Slice("my_key_y");
Slice reseek_key = Slice("my_key_v");
iter->Seek(reseek_key);
while (iter->Valid()) {
iter->Next();
reseek_keys_without_tuning++;
}
uint64_t readahead_trimmed =
options.statistics->getAndResetTickerCount(READAHEAD_TRIMMED);
ASSERT_EQ(readahead_trimmed, 0);
ASSERT_GT(reseek_keys_without_tuning, 0);
}
}
{
@@ -2159,6 +2204,8 @@ TEST_P(PrefetchTest, IterReadAheadSizeWithUpperBound) {
ASSERT_GT(buff_count_with_tuning, 0);
// No of keys should be equal.
ASSERT_EQ(keys_without_tuning, keys_with_tuning);
// No of keys after reseek with new upper bound should be equal.
ASSERT_EQ(reseek_keys_without_tuning, reseek_keys_with_tuning);
}
Close();
}


@@ -55,6 +55,11 @@ class BlockPrefetcher {
void SetUpperBoundOffset(uint64_t upper_bound_offset) {
upper_bound_offset_ = upper_bound_offset;
if (prefetch_buffer() != nullptr) {
// Upper bound can be changed on reseek. So update that in
// FilePrefetchBuffer.
prefetch_buffer()->ResetUpperBoundOffset(upper_bound_offset);
}
}
private:
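
For orientation on the caller side, a hedged sketch of how an iterator's seek path might push the bound down through `SetUpperBoundOffset`; the types and helper below are hypothetical stand-ins, not code from this diff. Because `iterate_upper_bound` can change between seeks, the bound has to be re-derived and forwarded on every seek rather than only when the prefetcher is created.

```cpp
#include <cassert>
#include <cstdint>
#include <string>

// Hypothetical stand-ins for this sketch only.
struct IndexView {
  // Pretend lookup: a real index would map the key to a data-block offset.
  uint64_t BlockOffsetFor(const std::string& /*key*/) const { return 4096; }
};

struct PrefetcherHandle {
  uint64_t upper_bound_offset = 0;
  // Plays the role of BlockPrefetcher::SetUpperBoundOffset in this sketch.
  void SetUpperBoundOffset(uint64_t offset) { upper_bound_offset = offset; }
};

// Called from a (re)seek path: translate the current upper-bound key into a
// file offset and forward it, so a bound changed since the last seek is seen.
void PushDownUpperBound(const IndexView& index, PrefetcherHandle& prefetcher,
                        const std::string* iterate_upper_bound) {
  if (iterate_upper_bound != nullptr) {
    prefetcher.SetUpperBoundOffset(index.BlockOffsetFor(*iterate_upper_bound));
  }
}

int main() {
  IndexView index;
  PrefetcherHandle prefetcher;
  std::string ub = "my_key_y";
  PushDownUpperBound(index, prefetcher, &ub);
  assert(prefetcher.upper_bound_offset == 4096);
  return 0;
}
```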


@@ -0,0 +1 @@
* When auto_readahead_size is enabled, update readahead upper bound during readahead trimming when reseek changes iterate_upper_bound dynamically.