Fix Clang/GCC compilation warnings.

This makes it easier to adopt snappy in other projects.

PiperOrigin-RevId: 309958249
This commit is contained in:
Victor Costan 2020-05-05 16:13:04 +00:00
parent 113cd97ab3
commit c98344f626
6 changed files with 141 additions and 53 deletions

View file

@@ -37,6 +37,49 @@ if(NOT CMAKE_CXX_STANDARD)
set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_EXTENSIONS OFF)
endif(NOT CMAKE_CXX_STANDARD) endif(NOT CMAKE_CXX_STANDARD)
# https://github.com/izenecloud/cmake/blob/master/SetCompilerWarningAll.cmake
if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use the highest warning level for Visual Studio.
set(CMAKE_CXX_WARNING_LEVEL 4)
if(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
string(REGEX REPLACE "/W[0-4]" "/W4" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
else(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /W4")
endif(CMAKE_CXX_FLAGS MATCHES "/W[0-4]")
# Disable C++ exceptions.
string(REGEX REPLACE "/EH[a-z]+" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)
# Disable RTTI.
string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
# Use -Wextra for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wextra")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Wextra")
# Use -Werror for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
endif(NOT CMAKE_CXX_FLAGS MATCHES "-Werror")
# Disable C++ exceptions.
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
# Disable RTTI.
string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
# it prominent in the GUI. # it prominent in the GUI.
option(BUILD_SHARED_LIBS "Build shared libraries(DLLs)." OFF) option(BUILD_SHARED_LIBS "Build shared libraries(DLLs)." OFF)

View file

@@ -33,17 +33,24 @@
namespace snappy { namespace snappy {
Source::~Source() { } Source::~Source() = default;
Sink::~Sink() { } Sink::~Sink() = default;
char* Sink::GetAppendBuffer(size_t length, char* scratch) { char* Sink::GetAppendBuffer(size_t length, char* scratch) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)length;
return scratch; return scratch;
} }
char* Sink::GetAppendBufferVariable( char* Sink::GetAppendBufferVariable(
size_t min_size, size_t desired_size_hint, char* scratch, size_t min_size, size_t desired_size_hint, char* scratch,
size_t scratch_size, size_t* allocated_size) { size_t scratch_size, size_t* allocated_size) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)min_size;
(void)desired_size_hint;
*allocated_size = scratch_size; *allocated_size = scratch_size;
return scratch; return scratch;
} }
@@ -56,7 +63,7 @@ void Sink::AppendAndTakeOwnership(
(*deleter)(deleter_arg, bytes, n); (*deleter)(deleter_arg, bytes, n);
} }
ByteArraySource::~ByteArraySource() { } ByteArraySource::~ByteArraySource() = default;
size_t ByteArraySource::Available() const { return left_; } size_t ByteArraySource::Available() const { return left_; }
@@ -81,16 +88,20 @@ void UncheckedByteArraySink::Append(const char* data, size_t n) {
} }
char* UncheckedByteArraySink::GetAppendBuffer(size_t len, char* scratch) { char* UncheckedByteArraySink::GetAppendBuffer(size_t len, char* scratch) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)len;
(void)scratch;
return dest_; return dest_;
} }
void UncheckedByteArraySink::AppendAndTakeOwnership( void UncheckedByteArraySink::AppendAndTakeOwnership(
char* data, size_t n, char* bytes, size_t n,
void (*deleter)(void*, const char*, size_t), void (*deleter)(void*, const char*, size_t),
void *deleter_arg) { void *deleter_arg) {
if (data != dest_) { if (bytes != dest_) {
std::memcpy(dest_, data, n); std::memcpy(dest_, bytes, n);
(*deleter)(deleter_arg, data, n); (*deleter)(deleter_arg, bytes, n);
} }
dest_ += n; dest_ += n;
} }
@@ -98,6 +109,11 @@ void UncheckedByteArraySink::AppendAndTakeOwnership(
char* UncheckedByteArraySink::GetAppendBufferVariable( char* UncheckedByteArraySink::GetAppendBufferVariable(
size_t min_size, size_t desired_size_hint, char* scratch, size_t min_size, size_t desired_size_hint, char* scratch,
size_t scratch_size, size_t* allocated_size) { size_t scratch_size, size_t* allocated_size) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)min_size;
(void)scratch;
(void)scratch_size;
*allocated_size = desired_size_hint; *allocated_size = desired_size_hint;
return dest_; return dest_;
} }

View file

@@ -95,7 +95,7 @@
#ifdef ARRAYSIZE #ifdef ARRAYSIZE
#undef ARRAYSIZE #undef ARRAYSIZE
#endif #endif
#define ARRAYSIZE(a) (sizeof(a) / sizeof(*(a))) #define ARRAYSIZE(a) int{sizeof(a) / sizeof(*(a))}
// Static prediction hints. // Static prediction hints.
#ifdef HAVE_BUILTIN_EXPECT #ifdef HAVE_BUILTIN_EXPECT
@@ -439,28 +439,28 @@ inline const char* Varint::Parse32WithLimit(const char* p,
inline char* Varint::Encode32(char* sptr, uint32_t v) { inline char* Varint::Encode32(char* sptr, uint32_t v) {
// Operate on characters as unsigneds // Operate on characters as unsigneds
unsigned char* ptr = reinterpret_cast<unsigned char*>(sptr); uint8_t* ptr = reinterpret_cast<uint8_t*>(sptr);
static const int B = 128; static const uint8_t B = 128;
if (v < (1<<7)) { if (v < (1 << 7)) {
*(ptr++) = v; *(ptr++) = static_cast<uint8_t>(v);
} else if (v < (1<<14)) { } else if (v < (1 << 14)) {
*(ptr++) = v | B; *(ptr++) = static_cast<uint8_t>(v | B);
*(ptr++) = v>>7; *(ptr++) = static_cast<uint8_t>(v >> 7);
} else if (v < (1<<21)) { } else if (v < (1 << 21)) {
*(ptr++) = v | B; *(ptr++) = static_cast<uint8_t>(v | B);
*(ptr++) = (v>>7) | B; *(ptr++) = static_cast<uint8_t>((v >> 7) | B);
*(ptr++) = v>>14; *(ptr++) = static_cast<uint8_t>(v >> 14);
} else if (v < (1<<28)) { } else if (v < (1 << 28)) {
*(ptr++) = v | B; *(ptr++) = static_cast<uint8_t>(v | B);
*(ptr++) = (v>>7) | B; *(ptr++) = static_cast<uint8_t>((v >> 7) | B);
*(ptr++) = (v>>14) | B; *(ptr++) = static_cast<uint8_t>((v >> 14) | B);
*(ptr++) = v>>21; *(ptr++) = static_cast<uint8_t>(v >> 21);
} else { } else {
*(ptr++) = v | B; *(ptr++) = static_cast<uint8_t>(v | B);
*(ptr++) = (v>>7) | B; *(ptr++) = static_cast<uint8_t>((v>>7) | B);
*(ptr++) = (v>>14) | B; *(ptr++) = static_cast<uint8_t>((v>>14) | B);
*(ptr++) = (v>>21) | B; *(ptr++) = static_cast<uint8_t>((v>>21) | B);
*(ptr++) = v>>28; *(ptr++) = static_cast<uint8_t>(v >> 28);
} }
return reinterpret_cast<char*>(ptr); return reinterpret_cast<char*>(ptr);
} }

View file

@@ -118,7 +118,7 @@ namespace file {
}; };
DummyStatus GetContents( DummyStatus GetContents(
const std::string& filename, std::string* data, int unused) { const std::string& filename, std::string* data, int /*unused*/) {
FILE* fp = std::fopen(filename.c_str(), "rb"); FILE* fp = std::fopen(filename.c_str(), "rb");
if (fp == NULL) { if (fp == NULL) {
std::perror(filename.c_str()); std::perror(filename.c_str());
@@ -142,7 +142,7 @@ namespace file {
} }
inline DummyStatus SetContents( inline DummyStatus SetContents(
const std::string& filename, const std::string& str, int unused) { const std::string& filename, const std::string& str, int /*unused*/) {
FILE* fp = std::fopen(filename.c_str(), "wb"); FILE* fp = std::fopen(filename.c_str(), "wb");
if (fp == NULL) { if (fp == NULL) {
std::perror(filename.c_str()); std::perror(filename.c_str());

View file

@@ -96,7 +96,7 @@ static inline uint32_t HashBytes(uint32_t bytes, int shift) {
return (bytes * kMul) >> shift; return (bytes * kMul) >> shift;
} }
size_t MaxCompressedLength(size_t source_len) { size_t MaxCompressedLength(size_t source_bytes) {
// Compressed data can be defined as: // Compressed data can be defined as:
// compressed := item* literal* // compressed := item* literal*
// item := literal* copy // item := literal* copy
@@ -117,7 +117,7 @@ size_t MaxCompressedLength(size_t source_len) {
// I.e., 6 bytes of input turn into 7 bytes of "compressed" data. // I.e., 6 bytes of input turn into 7 bytes of "compressed" data.
// //
// This last factor dominates the blowup, so the final estimate is: // This last factor dominates the blowup, so the final estimate is:
return 32 + source_len + source_len/6; return 32 + source_bytes + source_bytes / 6;
} }
namespace { namespace {
@@ -563,7 +563,7 @@ char* CompressFragment(const char* input,
// These for-loops are meant to be unrolled. So we can freely // These for-loops are meant to be unrolled. So we can freely
// special case the first iteration to use the value already // special case the first iteration to use the value already
// loaded in preload. // loaded in preload.
uint32_t dword = i == 0 ? preload : data; uint32_t dword = i == 0 ? preload : static_cast<uint32_t>(data);
assert(dword == LittleEndian::Load32(ip + i)); assert(dword == LittleEndian::Load32(ip + i));
uint32_t hash = HashBytes(dword, shift); uint32_t hash = HashBytes(dword, shift);
candidate = base_ip + table[hash]; candidate = base_ip + table[hash];
@@ -680,7 +680,12 @@ char* CompressFragment(const char* input,
// Called back at every compression call to trace parameters and sizes. // Called back at every compression call to trace parameters and sizes.
static inline void Report(const char *algorithm, size_t compressed_size, static inline void Report(const char *algorithm, size_t compressed_size,
size_t uncompressed_size) {} size_t uncompressed_size) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)algorithm;
(void)compressed_size;
(void)uncompressed_size;
}
// Signature of output types needed by decompression code. // Signature of output types needed by decompression code.
// The decompression code is templatized on a type that obeys this // The decompression code is templatized on a type that obeys this
@@ -1170,7 +1175,10 @@ class SnappyIOVecWriter {
} }
char* GetOutputPtr() { return nullptr; } char* GetOutputPtr() { return nullptr; }
void SetOutputPtr(char* op) {} void SetOutputPtr(char* op) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)op;
}
inline bool AppendNoCheck(const char* ip, size_t len) { inline bool AppendNoCheck(const char* ip, size_t len) {
while (len > 0) { while (len > 0) {
@@ -1360,10 +1368,13 @@ class SnappyArrayWriter {
SNAPPY_ATTRIBUTE_ALWAYS_INLINE SNAPPY_ATTRIBUTE_ALWAYS_INLINE
inline bool AppendFromSelf(size_t offset, size_t len, char** op_p) { inline bool AppendFromSelf(size_t offset, size_t len, char** op_p) {
char* const op = *op_p; char* const op = *op_p;
assert(op >= base_);
char* const op_end = op + len; char* const op_end = op + len;
// Check if we try to append from before the start of the buffer. // Check if we try to append from before the start of the buffer.
if (SNAPPY_PREDICT_FALSE(op - base_ < offset)) return false; if (SNAPPY_PREDICT_FALSE(static_cast<size_t>(op - base_) < offset))
return false;
if (SNAPPY_PREDICT_FALSE((kSlopBytes < 64 && len > kSlopBytes) || if (SNAPPY_PREDICT_FALSE((kSlopBytes < 64 && len > kSlopBytes) ||
op >= op_limit_min_slop_ || offset < len)) { op >= op_limit_min_slop_ || offset < len)) {
if (op_end > op_limit_ || offset == 0) return false; if (op_end > op_limit_ || offset == 0) return false;
@@ -1381,8 +1392,9 @@ class SnappyArrayWriter {
inline void Flush() {} inline void Flush() {}
}; };
bool RawUncompress(const char* compressed, size_t n, char* uncompressed) { bool RawUncompress(const char* compressed, size_t compressed_length,
ByteArraySource reader(compressed, n); char* uncompressed) {
ByteArraySource reader(compressed, compressed_length);
return RawUncompress(&reader, uncompressed); return RawUncompress(&reader, uncompressed);
} }
@@ -1391,9 +1403,10 @@ bool RawUncompress(Source* compressed, char* uncompressed) {
return InternalUncompress(compressed, &output); return InternalUncompress(compressed, &output);
} }
bool Uncompress(const char* compressed, size_t n, std::string* uncompressed) { bool Uncompress(const char* compressed, size_t compressed_length,
std::string* uncompressed) {
size_t ulength; size_t ulength;
if (!GetUncompressedLength(compressed, n, &ulength)) { if (!GetUncompressedLength(compressed, compressed_length, &ulength)) {
return false; return false;
} }
// On 32-bit builds: max_size() < kuint32max. Check for that instead // On 32-bit builds: max_size() < kuint32max. Check for that instead
@@ -1402,7 +1415,8 @@ bool Uncompress(const char* compressed, size_t n, std::string* uncompressed) {
return false; return false;
} }
STLStringResizeUninitialized(uncompressed, ulength); STLStringResizeUninitialized(uncompressed, ulength);
return RawUncompress(compressed, n, string_as_array(uncompressed)); return RawUncompress(compressed, compressed_length,
string_as_array(uncompressed));
} }
// A Writer that drops everything on the floor and just does validation // A Writer that drops everything on the floor and just does validation
@@ -1422,11 +1436,20 @@ class SnappyDecompressionValidator {
return expected_ == produced_; return expected_ == produced_;
} }
inline bool Append(const char* ip, size_t len, size_t* produced) { inline bool Append(const char* ip, size_t len, size_t* produced) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)ip;
*produced += len; *produced += len;
return *produced <= expected_; return *produced <= expected_;
} }
inline bool TryFastAppend(const char* ip, size_t available, size_t length, inline bool TryFastAppend(const char* ip, size_t available, size_t length,
size_t* produced) { size_t* produced) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)ip;
(void)available;
(void)length;
(void)produced;
return false; return false;
} }
inline bool AppendFromSelf(size_t offset, size_t len, size_t* produced) { inline bool AppendFromSelf(size_t offset, size_t len, size_t* produced) {
@@ -1439,8 +1462,8 @@ class SnappyDecompressionValidator {
inline void Flush() {} inline void Flush() {}
}; };
bool IsValidCompressedBuffer(const char* compressed, size_t n) { bool IsValidCompressedBuffer(const char* compressed, size_t compressed_length) {
ByteArraySource reader(compressed, n); ByteArraySource reader(compressed, compressed_length);
SnappyDecompressionValidator writer; SnappyDecompressionValidator writer;
return InternalUncompress(&reader, &writer); return InternalUncompress(&reader, &writer);
} }
@@ -1567,13 +1590,15 @@ class SnappyScatteredWriter {
inline bool AppendFromSelf(size_t offset, size_t len, char** op_p) { inline bool AppendFromSelf(size_t offset, size_t len, char** op_p) {
char* op = *op_p; char* op = *op_p;
assert(op >= op_base_);
// Check if we try to append from before the start of the buffer. // Check if we try to append from before the start of the buffer.
if (SNAPPY_PREDICT_FALSE((kSlopBytes < 64 && len > kSlopBytes) || if (SNAPPY_PREDICT_FALSE((kSlopBytes < 64 && len > kSlopBytes) ||
op - op_base_ < offset || static_cast<size_t>(op - op_base_) < offset ||
op >= op_limit_min_slop_ || offset < len)) { op >= op_limit_min_slop_ || offset < len)) {
if (offset == 0) return false; if (offset == 0) return false;
char* const op_end = op + len; char* const op_end = op + len;
if (SNAPPY_PREDICT_FALSE(op - op_base_ < offset || op_end > op_limit_)) { if (SNAPPY_PREDICT_FALSE(static_cast<size_t>(op - op_base_) < offset ||
op_end > op_limit_)) {
op_ptr_ = op; op_ptr_ = op;
bool res = SlowAppendFromSelf(offset, len); bool res = SlowAppendFromSelf(offset, len);
*op_p = op_ptr_; *op_p = op_ptr_;
@@ -1692,6 +1717,10 @@ class SnappySinkAllocator {
}; };
static void Deleter(void* arg, const char* bytes, size_t size) { static void Deleter(void* arg, const char* bytes, size_t size) {
// TODO: Switch to [[maybe_unused]] when we can assume C++17.
(void)arg;
(void)size;
delete[] bytes; delete[] bytes;
} }

View file

@@ -415,7 +415,7 @@ static void VerifyIOVec(const std::string& input) {
num = input.size(); num = input.size();
} }
struct iovec* iov = new iovec[num]; struct iovec* iov = new iovec[num];
int used_so_far = 0; size_t used_so_far = 0;
std::bernoulli_distribution one_in_five(1.0 / 5); std::bernoulli_distribution one_in_five(1.0 / 5);
for (size_t i = 0; i < num; ++i) { for (size_t i = 0; i < num; ++i) {
assert(used_so_far < input.size()); assert(used_so_far < input.size());
@@ -742,7 +742,7 @@ TEST(Snappy, FourByteOffset) {
// How many times each fragment is emitted. // How many times each fragment is emitted.
const int n1 = 2; const int n1 = 2;
const int n2 = 100000 / fragment2.size(); const int n2 = 100000 / fragment2.size();
const int length = n1 * fragment1.size() + n2 * fragment2.size(); const size_t length = n1 * fragment1.size() + n2 * fragment2.size();
std::string compressed; std::string compressed;
Varint::Append32(&compressed, length); Varint::Append32(&compressed, length);
@@ -1078,12 +1078,12 @@ TEST(Snappy, FindMatchLengthRandom) {
} }
DataEndingAtUnreadablePage u(s); DataEndingAtUnreadablePage u(s);
DataEndingAtUnreadablePage v(t); DataEndingAtUnreadablePage v(t);
int matched = TestFindMatchLength(u.data(), v.data(), t.size()); size_t matched = TestFindMatchLength(u.data(), v.data(), t.size());
if (matched == t.size()) { if (matched == t.size()) {
EXPECT_EQ(s, t); EXPECT_EQ(s, t);
} else { } else {
EXPECT_NE(s[matched], t[matched]); EXPECT_NE(s[matched], t[matched]);
for (int j = 0; j < matched; ++j) { for (size_t j = 0; j < matched; ++j) {
EXPECT_EQ(s[j], t[j]); EXPECT_EQ(s[j], t[j]);
} }
} }
@@ -1323,7 +1323,7 @@ static void BM_UIOVec(int iters, int arg) {
const int kNumEntries = 10; const int kNumEntries = 10;
struct iovec iov[kNumEntries]; struct iovec iov[kNumEntries];
char *dst = new char[contents.size()]; char *dst = new char[contents.size()];
int used_so_far = 0; size_t used_so_far = 0;
for (int i = 0; i < kNumEntries; ++i) { for (int i = 0; i < kNumEntries; ++i) {
iov[i].iov_base = dst + used_so_far; iov[i].iov_base = dst + used_so_far;
if (used_so_far == contents.size()) { if (used_so_far == contents.size()) {
@@ -1475,7 +1475,7 @@ static void BM_ZFlatIncreasingTableSize(int iters, int arg) {
SetBenchmarkBytesProcessed(static_cast<int64_t>(iters) * total_contents_size); SetBenchmarkBytesProcessed(static_cast<int64_t>(iters) * total_contents_size);
StartBenchmarkTiming(); StartBenchmarkTiming();
while (iters-- > 0) { while (iters-- > 0) {
for (int i = 0; i < contents.size(); ++i) { for (size_t i = 0; i < contents.size(); ++i) {
snappy::RawCompress(contents[i].data(), contents[i].size(), dst[i], snappy::RawCompress(contents[i].data(), contents[i].size(), dst[i],
&zsize); &zsize);
} }