Fix lint issues after enabling BLACK (#10717)
Summary: As title

Pull Request resolved: https://github.com/facebook/rocksdb/pull/10717

Test Plan: Unit Tests, CI

Reviewed By: riversand963
Differential Revision: D39700707
Pulled By: gitbw95
fbshipit-source-id: 54de27e695535a50159f5f6467da36aaf21bebae
This commit is contained in:
parent 749b849a34
commit dd40f83e95
@@ -98,7 +98,7 @@ class BenchmarkUtils:
         # e.g. 2022-07-1T00:14:55 should be 2022-07-01T00:14:55
         row["test_date"] = dt.isoformat()
         row["date"] = dt.isoformat()
-        return {key.replace(".", "_") : value for key, value in row.items()}
+        return {key.replace(".", "_"): value for key, value in row.items()}


 class ResultParser:
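The hunk above (like the finalize_and_sanitize hunk further down) removes the space before a dict-comprehension colon, which flake8 flags as E203 and Black's formatting forbids. The fix is mechanical enough to reproduce with Black's Python API; the snippet below is a minimal sketch assuming the black package is installed, and is not part of this commit:

    import black

    # Black drops the space before ':' in dict displays and comprehensions.
    src = 'out = {key.replace(".", "_") : value for key, value in row.items()}\n'
    print(black.format_str(src, mode=black.Mode()), end="")
    # out = {key.replace(".", "_"): value for key, value in row.items()}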
@@ -141,7 +141,9 @@ class ResultParser:
                 row.append("")
                 line = line[len(intra) :]
             else:
-                raise BenchmarkResultException("Invalid TSV line", f"{line_in} at {line}")
+                raise BenchmarkResultException(
+                    "Invalid TSV line", f"{line_in} at {line}"
+                )
         return row

     def parse(self, lines):
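This hunk is a line-length fix: at its real indentation the one-line raise exceeds Black's 88-character limit, so the call gets split. A small sketch reproducing the split; the 60-character limit below merely stands in for the indentation stripped from this view:

    import black

    src = 'raise BenchmarkResultException("Invalid TSV line", f"{line_in} at {line}")\n'
    print(black.format_str(src, mode=black.Mode(line_length=60)), end="")
    # raise BenchmarkResultException(
    #     "Invalid TSV line", f"{line_in} at {line}"
    # )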
@@ -30,7 +30,7 @@ class BenchmarkRunner(ABC):
         # refer GetInfoLogPrefix() in rocksdb/util/filename.cc
         # example db_path: /dev/shm/dbbench
         file_name = db_path[1:]  # to ignore the leading '/' character
-        to_be_replaced = re.compile("[^0-9a-zA-Z\-_\.]") # noqa
+        to_be_replaced = re.compile("[^0-9a-zA-Z\-_\.]")  # noqa
         for character in to_be_replaced.findall(db_path):
             file_name = file_name.replace(character, "_")
         if not file_name.endswith("_"):
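For context, the loop in this hunk turns a DB path into a safe log-file-name prefix. A standalone sketch using the example path from the code's own comment:

    import re

    db_path = "/dev/shm/dbbench"
    file_name = db_path[1:]  # drop the leading '/'
    to_be_replaced = re.compile(r"[^0-9a-zA-Z\-_\.]")
    for character in to_be_replaced.findall(db_path):
        file_name = file_name.replace(character, "_")
    print(file_name)  # dev_shm_dbbench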
@@ -33,7 +33,7 @@ class Log:
     def is_new_log(log_line):
         # The assumption is that a new log will start with a date printed in
         # the below regex format.
-        date_regex = "\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2}\.\d{6}" # noqa
+        date_regex = "\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2}\.\d{6}"  # noqa
         return re.match(date_regex, log_line)

     def __init__(self, log_line, column_families):
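date_regex drives LOG-line splitting: a line starts a new entry only if it begins with a timestamp. A quick check, using the sample line quoted in the next hunk:

    import re

    date_regex = r"\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2}\.\d{6}"
    print(bool(re.match(date_regex, "2018/07/25-17:29:05.176080 7f969de68700 ...")))  # True
    print(bool(re.match(date_regex, "[default] [JOB 3] Compacting ...")))  # False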
@@ -46,7 +46,7 @@ class Log:
         # "2018/07/25-17:29:05.176080 7f969de68700 [db/compaction_job.cc:1634]
         # [default] [JOB 3] Compacting 24@0 + 16@1 files to L1, score 6.00\n"
         for col_fam in column_families:
-            search_for_str = "\[" + col_fam + "\]" # noqa
+            search_for_str = "\[" + col_fam + "\]"  # noqa
             if re.search(search_for_str, self.message):
                 self.column_family = col_fam
                 break
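The search string here brackets the column-family name so that, for example, [default] in the log message is matched literally. A standalone sketch ("unknown_cf" is a made-up name, included for contrast):

    import re

    message = "[default] [JOB 3] Compacting 24@0 + 16@1 files to L1, score 6.00"
    for col_fam in ["unknown_cf", "default"]:
        search_for_str = r"\[" + col_fam + r"\]"
        if re.search(search_for_str, message):
            print("matched column family:", col_fam)  # matched column family: default
            break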
@@ -299,8 +299,14 @@ class OdsStatsFetcher(TimeSeriesData):
                 reqd_stats.append(key)
         return reqd_stats

-    def fetch_rate_url(self, entities: List[str], keys: List[str],
-                       window_len: str, percent: str, display: bool) -> str:
+    def fetch_rate_url(
+        self,
+        entities: List[str],
+        keys: List[str],
+        window_len: str,
+        percent: str,
+        display: bool,
+    ) -> str:
         transform_desc = (
             "rate(" + str(window_len) + ",duration=" + str(self.duration_sec)
         )
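Both signature hunks in this commit apply the same Black rule: a def that overflows the line limit is exploded to one parameter per line, with a "magic" trailing comma that keeps it exploded on future runs. A sketch reproducing it (the pass body is a placeholder; Black only parses the code, so the unresolved List name is harmless):

    import black

    src = (
        "def fetch_rate_url(self, entities: List[str], keys: List[str], "
        "window_len: str, percent: str, display: bool) -> str:\n"
        "    pass\n"
    )
    # The signature exceeds 88 characters, so Black splits it one
    # parameter per line and appends a trailing comma.
    print(black.format_str(src, mode=black.Mode()), end="")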
@@ -44,8 +44,14 @@ class TimeSeriesData(DataSource):
         # for each of them and populates the 'keys_ts' dictionary
         pass

-    def fetch_burst_epochs(self, entities: str, statistic: int,
-                           window_sec: float, threshold: bool, percent: bool) -> Dict[str, Dict[int, float]]:
+    def fetch_burst_epochs(
+        self,
+        entities: str,
+        statistic: int,
+        window_sec: float,
+        threshold: bool,
+        percent: bool,
+    ) -> Dict[str, Dict[int, float]]:
         # this method calculates the (percent) rate change in the 'statistic'
         # for each entity (over 'window_sec' seconds) and returns the epochs
         # where this rate change is greater than or equal to the 'threshold'
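The comments spell out the contract: per-entity rate of change over window_sec, filtered by threshold. A minimal, self-contained sketch of that computation for a single series mapping epoch to value; this is an illustration of the contract, not the rocksdb advisor implementation:

    from typing import Dict

    def burst_epochs(
        series: Dict[int, float], window_sec: float, threshold: float, percent: bool
    ) -> Dict[int, float]:
        # Rate of change between consecutive epochs, kept only where it
        # meets or exceeds the threshold.
        epochs = sorted(series)
        result = {}
        for prev, cur in zip(epochs, epochs[1:]):
            rate = (series[cur] - series[prev]) / window_sec
            if percent and series[prev] != 0:
                rate = rate * 100 / series[prev]
            if rate >= threshold:
                result[cur] = rate
        return result

    print(burst_epochs({0: 100.0, 30: 100.0, 60: 400.0}, 30.0, 5.0, False))  # {60: 10.0}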
@@ -67,7 +67,7 @@ class Rule(Section):
                 + ": rule must be associated with 2 conditions\
                     in order to check for a time dependency between them"
             )
-        time_format = "^\d+[s|m|h|d]$" # noqa
+        time_format = "^\d+[s|m|h|d]$"  # noqa
         if not re.match(time_format, self.overlap_time_seconds, re.IGNORECASE):
             raise ValueError(
                 self.name + ": overlap_time_seconds format: \d+[s|m|h|d]"
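time_format accepts a count followed by a seconds/minutes/hours/days unit, matched case-insensitively. A quick check (note that inside [s|m|h|d] the pipes are literal characters, not alternation, so the class also admits '|'; the # noqa quietly tolerates the non-raw string):

    import re

    time_format = r"^\d+[s|m|h|d]$"
    for value in ["60s", "5m", "12H", "90"]:
        print(value, bool(re.match(time_format, value, re.IGNORECASE)))
    # 60s True / 5m True / 12H True / 90 False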
@@ -471,7 +471,7 @@ multiops_wp_txn_params = {


 def finalize_and_sanitize(src_params):
-    dest_params = {k : v() if callable(v) else v for (k, v) in src_params.items()}
+    dest_params = {k: v() if callable(v) else v for (k, v) in src_params.items()}
     if is_release_mode():
         dest_params["read_fault_one_in"] = 0
     if dest_params.get("compression_max_dict_bytes") == 0:
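The comprehension fixed in this hunk is what lets the crash-test parameter table mix fixed values with zero-argument callables that are re-sampled on every run. A standalone sketch with made-up parameter values:

    import random

    src_params = {"write_buffer_size": 1024 * 1024, "sync": lambda: random.choice([0, 1])}
    dest_params = {k: v() if callable(v) else v for (k, v) in src_params.items()}
    print(dest_params)  # e.g. {'write_buffer_size': 1048576, 'sync': 1}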
@@ -655,20 +655,20 @@ def gen_cmd(params, unknown_params):
             for k, v in [(k, finalzied_params[k]) for k in sorted(finalzied_params)]
             if k
             not in {
-                    "test_type",
-                    "simple",
-                    "duration",
-                    "interval",
-                    "random_kill_odd",
-                    "cf_consistency",
-                    "txn",
-                    "test_best_efforts_recovery",
-                    "enable_ts",
-                    "test_multiops_txn",
-                    "write_policy",
-                    "stress_cmd",
-                    "test_tiered_storage",
-                }
+                "test_type",
+                "simple",
+                "duration",
+                "interval",
+                "random_kill_odd",
+                "cf_consistency",
+                "txn",
+                "test_best_efforts_recovery",
+                "enable_ts",
+                "test_multiops_txn",
+                "write_policy",
+                "stress_cmd",
+                "test_tiered_storage",
+            }
             and v is not None
         ]
         + unknown_params
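Finally, the reindented set is the skip-list of meta-parameters that gen_cmd must not forward to db_stress as flags. A standalone sketch of the surrounding comprehension; the "--{0}={1}" element expression and the sample values are reconstructed for illustration, and finalzied_params is spelled as in the source:

    finalzied_params = {
        "duration": 30,                   # meta-parameter: consumed by the driver
        "ops_per_thread": 1000,
        "acquire_snapshot_one_in": None,  # unset values are dropped
    }
    skip = {
        "test_type", "simple", "duration", "interval", "random_kill_odd",
        "cf_consistency", "txn", "test_best_efforts_recovery", "enable_ts",
        "test_multiops_txn", "write_policy", "stress_cmd", "test_tiered_storage",
    }
    cmd = [
        "--{0}={1}".format(k, v)
        for k, v in [(k, finalzied_params[k]) for k in sorted(finalzied_params)]
        if k not in skip and v is not None
    ]
    print(cmd)  # ['--ops_per_thread=1000']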