#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

try:
    from builtins import str
except ImportError:
    from __builtin__ import str

from targets_builder import TARGETSBuilder
import json
import os
import fnmatch
import sys

from util import ColorString

# This script generates a TARGETS file for Buck.
# Buck is a build tool that specifies dependencies among different build targets.
# Users can pass extra dependencies as a JSON object via the command line, and
# this script will include those dependencies in the generated TARGETS file.
# Usage:
# $python3 buckifier/buckify_rocksdb.py
# (This generates a TARGETS file without user-specified dependencies for unit
# tests.)
# $python3 buckifier/buckify_rocksdb.py \
#        '{"fake": {
#                      "extra_deps": [":test_dep", "//fakes/module:mock1"],
#                      "extra_compiler_flags": ["-DROCKSDB_LITE", "-Os"]
#                  }
#         }'
# (The generated TARGETS file has test_dep and mock1 as dependencies for RocksDB
# unit tests, and will use the extra_compiler_flags to compile the unit test
# sources.)
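# (Each top-level key in the JSON, "fake" above, is a target alias: in addition
# to the plain per-test targets, the script emits a "<test>_fake" variant of
# every unit test target that carries the extra dependencies and compiler
# flags.)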

# tests to export as libraries for inclusion in other projects
_EXPORTED_TEST_LIBS = ["env_basic_test"]


# Parse the src.mk file as a dictionary of
# VAR_NAME => list of files
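# src.mk is expected to contain stanzas like the following (illustrative,
# abridged):
#   LIB_SOURCES = \
#     cache/cache.cc \
#     cache/cache_entry_roles.cc \
#     ...
# i.e. a variable assignment followed by backslash-continued source paths.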
def parse_src_mk(repo_path):
    src_mk = repo_path + "/src.mk"
    src_files = {}
    for line in open(src_mk):
        line = line.strip()
        if len(line) == 0 or line[0] == '#':
            continue
        if '=' in line:
            current_src = line.split('=')[0].strip()
            src_files[current_src] = []
        elif '.c' in line:
            src_path = line.split('\\')[0].strip()
            src_files[current_src].append(src_path)
    return src_files


# get all .cc / .c files
def get_cc_files(repo_path):
    cc_files = []
    for root, dirnames, filenames in os.walk(repo_path):  # noqa: B007 T25377293 Grandfathered in
        root = root[(len(repo_path) + 1):]
        if "java" in root:
            # Skip java
            continue
        for filename in fnmatch.filter(filenames, '*.cc'):
            cc_files.append(os.path.join(root, filename))
        for filename in fnmatch.filter(filenames, '*.c'):
            cc_files.append(os.path.join(root, filename))
    return cc_files


# Get non_parallel tests from the Makefile
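# The corresponding Makefile stanza looks roughly like (illustrative,
# hypothetical test names):
#   NON_PARALLEL_TEST = \
#     some_test \
#     another_test \
# i.e. one test per backslash-continued line; scanning stops at the first
# line without a trailing backslash.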
def get_non_parallel_tests(repo_path):
    Makefile = repo_path + "/Makefile"

    s = set({})

    found_non_parallel_tests = False
    for line in open(Makefile):
        line = line.strip()
        if line.startswith("NON_PARALLEL_TEST ="):
            found_non_parallel_tests = True
        elif found_non_parallel_tests:
            if line.endswith("\\"):
                # remove the trailing \
                line = line[:-1]
                line = line.strip()
                s.add(line)
            else:
                # we consumed all the non_parallel tests
                break

    return s


# Parse extra dependencies passed by the user on the command line
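# For the usage example at the top of this file, the returned map would be,
# roughly:
#   {'': {'extra_deps': [], 'extra_compiler_flags': []},
#    'fake': {'extra_deps': [':test_dep', '//fakes/module:mock1'],
#             'extra_compiler_flags': ['-DROCKSDB_LITE', '-Os']}}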
def get_dependencies():
    deps_map = {
        '': {
            'extra_deps': [],
            'extra_compiler_flags': []
        }
    }
    if len(sys.argv) < 2:
        return deps_map

    def encode_dict(data):
        rv = {}
        for k, v in data.items():
            if isinstance(v, dict):
                v = encode_dict(v)
            rv[k] = v
        return rv

    extra_deps = json.loads(sys.argv[1], object_hook=encode_dict)
    for target_alias, deps in extra_deps.items():
        deps_map[target_alias] = deps
    return deps_map


# Prepare TARGETS file for buck
def generate_targets(repo_path, deps_map):
    print(ColorString.info("Generating TARGETS"))
    # parsed src.mk file
    src_mk = parse_src_mk(repo_path)
    # get all .cc files
    cc_files = get_cc_files(repo_path)
    # get non_parallel tests from Makefile
    non_parallel_tests = get_non_parallel_tests(repo_path)

    if src_mk is None or cc_files is None or non_parallel_tests is None:
        return False

    extra_argv = ""
    if len(sys.argv) >= 2:
        # Heuristically quote and canonicalize whitespace for inclusion in
        # the note recording how the file was generated.
        extra_argv = " '{0}'".format(" ".join(sys.argv[1].split()))

    TARGETS = TARGETSBuilder("%s/TARGETS" % repo_path, extra_argv)

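    # Note: per the Meta-internal folly integration (USE_FOLLY is always set in
    # the internal Buck build), the library targets below list folly's F14 hash
    # container as a dependency.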
    # rocksdb_lib
    TARGETS.add_library(
        "rocksdb_lib",
        src_mk["LIB_SOURCES"] +
        # always add range_tree, it's only excluded on ppc64, which we don't use internally
        src_mk["RANGE_TREE_SOURCES"] +
src_mk["TOOL_LIB_SOURCES"],
|
|
|
|
deps=["//folly/container:f14_hash"])
|
2020-10-01 05:49:20 +00:00
|
|
|
# rocksdb_whole_archive_lib
|
|
|
|
TARGETS.add_library(
|
|
|
|
"rocksdb_whole_archive_lib",
|
|
|
|
src_mk["LIB_SOURCES"] +
|
2021-03-29 23:31:26 +00:00
|
|
|
# always add range_tree, it's only excluded on ppc64, which we don't use internally
|
|
|
|
src_mk["RANGE_TREE_SOURCES"] +
|
2020-10-01 05:49:20 +00:00
|
|
|
src_mk["TOOL_LIB_SOURCES"],
|
        deps=["//folly/container:f14_hash"],
        headers=None,
        extra_external_deps="",
        link_whole=True)
    # rocksdb_test_lib
    TARGETS.add_library(
        "rocksdb_test_lib",
        src_mk.get("MOCK_LIB_SOURCES", []) +
        src_mk.get("TEST_LIB_SOURCES", []) +
        src_mk.get("EXP_LIB_SOURCES", []) +
        src_mk.get("ANALYZER_LIB_SOURCES", []),
        [":rocksdb_lib"],
        extra_test_libs=True
    )
    # rocksdb_tools_lib
    TARGETS.add_library(
        "rocksdb_tools_lib",
        src_mk.get("BENCH_LIB_SOURCES", []) +
        src_mk.get("ANALYZER_LIB_SOURCES", []) +
        ["test_util/testutil.cc"],
        [":rocksdb_lib"])
    # rocksdb_cache_bench_tools_lib
    TARGETS.add_library(
        "rocksdb_cache_bench_tools_lib",
        src_mk.get("CACHE_BENCH_LIB_SOURCES", []),
        [":rocksdb_lib"])
    # rocksdb_stress_lib
    TARGETS.add_rocksdb_library(
        "rocksdb_stress_lib",
        src_mk.get("ANALYZER_LIB_SOURCES", [])
        + src_mk.get('STRESS_LIB_SOURCES', [])
        + ["test_util/testutil.cc"])
    # db_stress binary
    TARGETS.add_binary("db_stress",
                       ["db_stress_tool/db_stress.cc"],
                       [":rocksdb_stress_lib"])
    # bench binaries
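    # Each microbench source gets its own binary target named after the file,
    # e.g. a (hypothetical) "microbench/foo_bench.cc" entry would yield a
    # "foo_bench" target.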
    for src in src_mk.get("MICROBENCH_SOURCES", []):
        name = src.rsplit('/', 1)[1].split('.')[0] if '/' in src else src.split('.')[0]
        TARGETS.add_binary(
            name,
            [src],
            [],
            extra_bench_libs=True
        )
    print("Extra dependencies:\n{0}".format(json.dumps(deps_map)))

    # Dictionary test executable name -> relative source file path
    test_source_map = {}

    # c_test.c is added through TARGETS.add_c_test(). If there is
    # more than one .c test file, we need to extend
    # TARGETS.add_c_test() to include the other C tests too.
    for test_src in src_mk.get("TEST_MAIN_SOURCES_C", []):
        if test_src != 'db/c_test.c':
            print("Don't know how to deal with " + test_src)
            return False
    TARGETS.add_c_test()

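    # buckifier/bench.json and bench-slow.json are expected to hold a list of
    # benchmark configs; judging from the parsing below, each looks roughly like:
    #   {"name": ..., "expected_runtime_one_iter": ..., "sl_iterations": ...,
    #    "regression_threshold": ...,
    #    "benchmarks": {<binary>: {<benchmark>: [<metric>, ...]}}}
    # Dict-valued metrics are dropped; everything else is kept.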
    try:
        with open(f"{repo_path}/buckifier/bench.json") as json_file:
            fast_fancy_bench_config_list = json.load(json_file)
            for config_dict in fast_fancy_bench_config_list:
                clean_benchmarks = {}
                benchmarks = config_dict['benchmarks']
                for binary, benchmark_dict in benchmarks.items():
                    clean_benchmarks[binary] = {}
                    for benchmark, overloaded_metric_list in benchmark_dict.items():
                        clean_benchmarks[binary][benchmark] = []
                        for metric in overloaded_metric_list:
                            if not isinstance(metric, dict):
                                clean_benchmarks[binary][benchmark].append(metric)
                TARGETS.add_fancy_bench_config(
                    config_dict['name'],
                    clean_benchmarks,
                    False,
                    config_dict['expected_runtime_one_iter'],
                    config_dict['sl_iterations'],
                    config_dict['regression_threshold'])

with open(f"{repo_path}/buckifier/bench-slow.json") as json_file:
|
|
|
|
slow_fancy_bench_config_list = json.load(json_file)
|
|
|
|
for config_dict in slow_fancy_bench_config_list:
|
2022-03-01 23:09:45 +00:00
|
|
|
clean_benchmarks = {}
|
|
|
|
benchmarks = config_dict['benchmarks']
|
|
|
|
for binary, benchmark_dict in benchmarks.items():
|
|
|
|
clean_benchmarks[binary] = {}
|
|
|
|
for benchmark, overloaded_metric_list in benchmark_dict.items():
|
|
|
|
clean_benchmarks[binary][benchmark] = []
|
|
|
|
for metric in overloaded_metric_list:
|
|
|
|
if not isinstance(metric, dict):
|
|
|
|
clean_benchmarks[binary][benchmark].append(metric)
|
|
|
|
for config_dict in slow_fancy_bench_config_list:
|
|
|
|
TARGETS.add_fancy_bench_config(config_dict['name']+"_slow", clean_benchmarks, True, config_dict['expected_runtime_one_iter'], config_dict['sl_iterations'], config_dict['regression_threshold'])
|
|
|
|
# it is better servicelab experiments break
|
|
|
|
# than rocksdb github ci
|
|
|
|
except Exception:
|
2022-02-18 18:59:57 +00:00
|
|
|
pass
|
|
|
|
|
|
|
|
    TARGETS.add_test_header()

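    # Map each test main source to its executable name; for example, an entry
    # like "db/db_basic_test.cc" would map to "db_basic_test".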
    for test_src in src_mk.get("TEST_MAIN_SOURCES", []):
        test = test_src.split('.c')[0].strip().split('/')[-1].strip()
        test_source_map[test] = test_src
        print(test + " " + test_src)

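    # Register one test target per (target alias, test) pair: the default ''
    # alias produces the plain "<test>" targets, and each user-supplied alias
    # adds a "<test>_<alias>" variant with its extra deps and compiler flags.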
    for target_alias, deps in deps_map.items():
        for test, test_src in sorted(test_source_map.items()):
            if len(test) == 0:
                print(ColorString.warning("Failed to get test name for %s" % test_src))
                continue

            test_target_name = \
                test if not target_alias else test + "_" + target_alias

            if test in _EXPORTED_TEST_LIBS:
                test_library = "%s_lib" % test_target_name
                TARGETS.add_library(
                    test_library,
                    [test_src],
                    deps=[":rocksdb_test_lib"],
                    extra_test_libs=True)
                TARGETS.register_test(
                    test_target_name,
                    test_src,
                    deps=json.dumps(deps['extra_deps'] + [':' + test_library]),
                    extra_compiler_flags=json.dumps(deps['extra_compiler_flags']))
            else:
                TARGETS.register_test(
                    test_target_name,
                    test_src,
                    deps=json.dumps(deps['extra_deps'] + [":rocksdb_test_lib"]),
                    extra_compiler_flags=json.dumps(deps['extra_compiler_flags']))

print(ColorString.info("Generated TARGETS Summary:"))
|
|
|
|
print(ColorString.info("- %d libs" % TARGETS.total_lib))
|
|
|
|
print(ColorString.info("- %d binarys" % TARGETS.total_bin))
|
|
|
|
print(ColorString.info("- %d tests" % TARGETS.total_test))
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
|
|
|
def get_rocksdb_path():
    # rocksdb = {script_dir}/..
    script_dir = os.path.dirname(sys.argv[0])
    script_dir = os.path.abspath(script_dir)
    rocksdb_path = os.path.abspath(
        os.path.join(script_dir, "../"))

    return rocksdb_path


def exit_with_error(msg):
    print(ColorString.error(msg))
    sys.exit(1)


def main():
    deps_map = get_dependencies()
    # Generate TARGETS file for buck
    ok = generate_targets(get_rocksdb_path(), deps_map)
    if not ok:
        exit_with_error("Failed to generate TARGETS files")


if __name__ == "__main__":
    main()