mirror of https://github.com/bazelbuild/rules_pkg
Refactor all the input processing code. (#756)
* Refactor all the input processing code.
  - Create a MappingContext to hold behaviors and defaults.
  - Change calls from `process_x(ctx, content_map, ..., default_x, default_y, ...)`
    to `process_x(mapping_context, ...)`.
  1. This brings the code more in line with the new preferred rule writing style.
  2. A major motivation for the change is adding include_runfiles to pkg_tar, where we
     sometimes want add_label_list to do the runfiles inclusion for us, and other times
     we want to revert to legacy behavior. If we always passed it in `ctx`, that would
     not be possible.
* Style nits
* Make buildifier happier
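The sketch below illustrates the calling convention the commit message describes. It is a minimal, hypothetical rule implementation (`_my_pkg_impl` and its `srcs` attribute are illustrative and not part of this commit); the only APIs it relies on, `create_mapping_context_from_ctx` and `process_src`, are the ones introduced in the diff that follows.

```starlark
load("//pkg/private:pkg_files.bzl", "create_mapping_context_from_ctx", "process_src")

def _my_pkg_impl(ctx):
    # Behaviors and defaults now ride along in one MappingContext instead of
    # being threaded through every process_* call.
    mapping_context = create_mapping_context_from_ctx(
        ctx,
        label = ctx.label,
        default_mode = "0644",
    )
    for src in ctx.attr.srcs:
        # Old style (before this commit):
        #   process_src(ctx, content_map, file_deps, src = src, origin = src.label,
        #               default_mode = "0644", default_user = None, default_group = None, ...)
        process_src(mapping_context, src = src, origin = src.label)

    # The accumulated state lives on the context:
    #   mapping_context.content_map  - destination path -> entry
    #   mapping_context.file_deps    - list of depsets the mapping depends on
```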
parent 8e5570cf44
commit 5664e0ed1b
@@ -17,28 +17,20 @@ This module provides an interface (`pkg_install`) for creating a `bazel
run`-able installation script.
"""

load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo")
load("//pkg/private:pkg_files.bzl", "process_src", "write_manifest")
load("@rules_python//python:defs.bzl", "py_binary")
load("//pkg:providers.bzl", "PackageDirsInfo", "PackageFilegroupInfo", "PackageFilesInfo", "PackageSymlinkInfo")
load("//pkg/private:pkg_files.bzl", "create_mapping_context_from_ctx", "process_src", "write_manifest")

def _pkg_install_script_impl(ctx):
    script_file = ctx.actions.declare_file(ctx.attr.name + ".py")

    fragments = []
    files_to_run = []
    content_map = {}
    mapping_context = create_mapping_context_from_ctx(ctx, label = ctx.label, default_mode = "0644")
    for src in ctx.attr.srcs:
        process_src(
            ctx,
            content_map,
            files_to_run,
            mapping_context,
            src = src,
            origin = src.label,
            default_mode = "0644",
            default_user = None,
            default_group = None,
            default_uid = None,
            default_gid = None,
        )

    manifest_file = ctx.actions.declare_file(ctx.attr.name + "-install-manifest.json")
@@ -49,14 +41,14 @@ def _pkg_install_script_impl(ctx):
    # Note that these paths are different when used as tools run within a build.
    # See also
    # https://docs.bazel.build/versions/4.1.0/skylark/rules.html#tools-with-runfiles
    write_manifest(ctx, manifest_file, content_map, use_short_path = True)
    write_manifest(ctx, manifest_file, mapping_context.content_map, use_short_path = True)

    # Get the label of the actual py_binary used to run this script.
    #
    # This is super brittle, but I don't know how to otherwise get this
    # information without creating a circular dependency given the current state
    # of rules_python.

    # The name of the binary is the name of this target, minus
    # "_install_script".
    label_str = str(ctx.label)[:-len("_install_script")]
@@ -77,7 +69,7 @@ def _pkg_install_script_impl(ctx):

    my_runfiles = ctx.runfiles(
        files = [manifest_file],
        transitive_files = depset(transitive = files_to_run),
        transitive_files = depset(transitive = mapping_context.file_deps),
    )

    return [
@@ -147,7 +139,7 @@ def pkg_install(name, srcs, **kwargs):
    ```
    bazel run -- //path/to:install --help
    ```

    WARNING: While this rule does function when being run from within a bazel
    rule, such use is not recommended. If you do, **always** use the
    `--destdir` argument to specify the desired location for the installation to
pkg/path.bzl (26 changed lines)
@@ -11,8 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions that don't depend on Skylark, so can be unit tested."""
"""Helper functions that don't depend on Starlark, so can be unit tested."""

# buildifier: disable=function-docstring-args
# buildifier: disable=function-docstring-return
def safe_short_path(file_):
    """Like `File.short_path` but safe for use with files from external repositories.
    """
@@ -27,9 +29,10 @@ def safe_short_path(file_):
    # Beginning with `file_.path`, remove optional `F.root.path`.
    working_path = file_.path
    if not file_.is_source:
        working_path = working_path[len(file_.root.path)+1:]
        working_path = working_path[len(file_.root.path) + 1:]
    return working_path

# buildifier: disable=function-docstring-args,function-docstring-return
def _short_path_dirname(path):
    """Returns the directory's name of the short path of an artifact."""
    sp = safe_short_path(path)
@@ -39,6 +42,8 @@ def _short_path_dirname(path):
        return ""
    return sp[:last_pkg]

# buildifier: disable=function-docstring-args
# buildifier: disable=function-docstring-return
def dest_path(f, strip_prefix, data_path_without_prefix = ""):
    """Returns the short path of f, stripped of strip_prefix."""
    f_short_path = safe_short_path(f)
@@ -56,32 +61,33 @@ def dest_path(f, strip_prefix, data_path_without_prefix = ""):

        # Avoid stripping prefix if final directory is incomplete
        if prefix_last_dir not in f_short_path.split("/"):
          strip_prefix = data_path_without_prefix
            strip_prefix = data_path_without_prefix

        return f_short_path[len(strip_prefix):]
    return f_short_path

def compute_data_path(ctx, data_path):
def compute_data_path(label, data_path):
    """Compute the relative data path prefix from the data_path attribute.

    Args:
      ctx: rule implementation ctx.
      data_path: path to a file, relative to the package of the rule ctx.
      label: target label
      data_path: path to a file, relative to the package of the label.
    Returns:
      str
    """
    build_dir = ctx.label.package
    if data_path:
        # Strip ./ from the beginning if specified.
        # There is no way to handle .// correctly (no function that would make
        # that possible and Skylark is not turing complete) so just consider it
        # that possible and Starlark is not turing complete) so just consider it
        # as an absolute path.
        if len(data_path) >= 2 and data_path[0:2] == "./":
            data_path = data_path[2:]
        if not data_path or data_path == ".":  # Relative to current package
            return build_dir
            return label.package
        elif data_path[0] == "/":  # Absolute path
            return data_path[1:]
        else:  # Relative to a sub-directory
            tmp_short_path_dirname = build_dir
            tmp_short_path_dirname = label.package
            if tmp_short_path_dirname:
                return tmp_short_path_dirname + "/" + data_path
            return data_path
@@ -43,9 +43,10 @@ load(
ENTRY_IS_FILE = "file"  # Entry is a file: take content from <src>
ENTRY_IS_LINK = "symlink"  # Entry is a symlink: dest -> <src>
ENTRY_IS_DIR = "dir"  # Entry is an empty dir
ENTRY_IS_TREE = "tree" # Entry is a tree artifact: take tree from <src>
ENTRY_IS_TREE = "tree"  # Entry is a tree artifact: take tree from <src>
ENTRY_IS_EMPTY_FILE = "empty-file"  # Entry is a an empty file

# buildifier: disable=name-conventions
_DestFile = provider(
    doc = """Information about each destination in the final package.""",
    fields = {
@@ -61,7 +62,69 @@ _DestFile = provider(
    },
)

def _check_dest(ctx, content_map, dest, src, origin):
# buildifier: disable=name-conventions
_MappingContext = provider(
    doc = """Fields passed to process_* methods.""",
    fields = {
        "content_map": "in/out The content_map we are building up",
        "file_deps": "in/out list of file Depsets represented in the map",
        "label": "ctx.label",

        # Behaviors
        "allow_duplicates_with_different_content": "bool: don't fail when you double mapped files",
        "include_runfiles": "bool: include runfiles",
        "strip_prefix": "strip_prefix",

        # Defaults
        "default_mode": "Default mode to apply to file without a mode setting",
        "default_user": "Default user name to apply to file without a user",
        "default_group": "Default group name to apply to file without a group",
        "default_uid": "Default numeric uid to apply to file without a uid",
        "default_gid": "Default numeric gid to apply to file without a gid",
    },
)

# buildifier: disable=function-docstring-args
def create_mapping_context_from_ctx(
        ctx,
        label,
        allow_duplicates_with_different_content = None,
        strip_prefix = None,
        include_runfiles = None,
        default_mode = None):
    """Construct a MappingContext.

    Args: See the provider definition.

    Returns:
        _MappingContext
    """
    if allow_duplicates_with_different_content == None:
        allow_duplicates_with_different_content = ctx.attr.allow_duplicates_with_different_content if hasattr(ctx.attr, "allow_duplicates_with_different_content") else False
    if strip_prefix == None:
        strip_prefix = ctx.attr.strip_prefix if hasattr(ctx.attr, "strip_prefix") else ""
    if include_runfiles == None:
        include_runfiles = ctx.attr.include_runfiles if hasattr(ctx.attr, "include_runfiles") else False
    if default_mode == None:
        default_mode = ctx.attr.mode if hasattr(ctx.attr, "default_mode") else ""

    return _MappingContext(
        content_map = dict(),
        file_deps = list(),
        label = label,
        allow_duplicates_with_different_content = allow_duplicates_with_different_content,
        strip_prefix = strip_prefix,
        include_runfiles = include_runfiles,
        default_mode = default_mode,
        # TODO(aiuto): allow these to be passed in as needed. But, before doing
        # that, explore defauilt_uid/gid as 0 rather than None
        default_user = "",
        default_group = "",
        default_uid = None,
        default_gid = None,
    )

def _check_dest(content_map, dest, src, origin, allow_duplicates_with_different_content = False):
    old_entry = content_map.get(dest)
    if not old_entry:
        return
@@ -74,12 +137,12 @@ def _check_dest(ctx, content_map, dest, src, origin):
    # brings in the file with a different owner.
    if old_entry.src.path != src.path:
        msg = "Duplicate output path: <%s>, declared in %s and %s\n SRC: %s" % (
            dest,
            origin,
            content_map[dest].origin,
            src,
        )
        if ctx.attr.allow_duplicates_with_different_content:
            dest,
            origin,
            content_map[dest].origin,
            src,
        )
        if allow_duplicates_with_different_content:
            # buildifier: disable=print
            print("WARNING:", msg)
        else:
@@ -97,18 +160,32 @@ def _merge_attributes(info, mode, user, group, uid, gid):

        new_uid = attrs.get("uid")
        if new_uid != None:
          uid = new_uid
            uid = new_uid
        new_gid = attrs.get("gid")
        if new_gid != None:
          gid = new_gid
            gid = new_gid
    return (mode, user, group, uid, gid)

def _process_pkg_dirs(ctx, content_map, pkg_dirs_info, origin, default_mode, default_user, default_group, default_uid, default_gid):
    attrs = _merge_attributes(pkg_dirs_info, default_mode, default_user, default_group, default_uid, default_gid)
def _merge_context_attributes(info, mapping_context):
    """Merge defaults from mapping context with those in the source provider.

    Args:
        info: provider from a pkt_* target
        mapping_context: MappingContext with the defaults.
    """
    default_mode = mapping_context.default_mode if hasattr(mapping_context, "default_mode") else ""
    default_user = mapping_context.default_user if hasattr(mapping_context, "default_user") else ""
    default_group = mapping_context.default_group if hasattr(mapping_context, "default_group") else ""
    default_uid = mapping_context.default_uid if hasattr(mapping_context, "default_uid") else ""
    default_gid = mapping_context.default_gid if hasattr(mapping_context, "default_gid") else ""
    return _merge_attributes(info, default_mode, default_user, default_group, default_uid, default_gid)

def _process_pkg_dirs(mapping_context, pkg_dirs_info, origin):
    attrs = _merge_context_attributes(pkg_dirs_info, mapping_context)
    for dir in pkg_dirs_info.dirs:
        dest = dir.strip("/")
        _check_dest(ctx, content_map, dest, None, origin)
        content_map[dest] = _DestFile(
        _check_dest(mapping_context.content_map, dest, None, origin, mapping_context.allow_duplicates_with_different_content)
        mapping_context.content_map[dest] = _DestFile(
            src = None,
            entry_type = ENTRY_IS_DIR,
            mode = attrs[0],
@@ -119,12 +196,12 @@ def _process_pkg_dirs(ctx, content_map, pkg_dirs_info, origin, default_mode, def
            origin = origin,
        )

def _process_pkg_files(ctx, content_map, pkg_files_info, origin, default_mode, default_user, default_group, default_uid, default_gid):
    attrs = _merge_attributes(pkg_files_info, default_mode, default_user, default_group, default_uid, default_gid)
def _process_pkg_files(mapping_context, pkg_files_info, origin):
    attrs = _merge_context_attributes(pkg_files_info, mapping_context)
    for filename, src in pkg_files_info.dest_src_map.items():
        dest = filename.strip("/")
        _check_dest(ctx, content_map, dest, src, origin)
        content_map[dest] = _DestFile(
        _check_dest(mapping_context.content_map, dest, src, origin, mapping_context.allow_duplicates_with_different_content)
        mapping_context.content_map[dest] = _DestFile(
            src = src,
            entry_type = ENTRY_IS_TREE if src.is_directory else ENTRY_IS_FILE,
            mode = attrs[0],
@@ -135,11 +212,11 @@ def _process_pkg_files(ctx, content_map, pkg_files_info, origin, default_mode, d
            origin = origin,
        )

def _process_pkg_symlink(ctx, content_map, pkg_symlink_info, origin, default_mode, default_user, default_group, default_uid, default_gid):
def _process_pkg_symlink(mapping_context, pkg_symlink_info, origin):
    dest = pkg_symlink_info.destination.strip("/")
    attrs = _merge_attributes(pkg_symlink_info, default_mode, default_user, default_group, default_uid, default_gid)
    _check_dest(ctx, content_map, dest, None, origin)
    content_map[dest] = _DestFile(
    attrs = _merge_context_attributes(pkg_symlink_info, mapping_context)
    _check_dest(mapping_context.content_map, dest, None, origin, mapping_context.allow_duplicates_with_different_content)
    mapping_context.content_map[dest] = _DestFile(
        src = None,
        entry_type = ENTRY_IS_LINK,
        mode = attrs[0],
@@ -151,40 +228,24 @@ def _process_pkg_symlink(ctx, content_map, pkg_symlink_info, origin, default_mod
        link_to = pkg_symlink_info.target,
    )

def _process_pkg_filegroup(ctx, content_map, pkg_filegroup_info, origin, default_mode, default_user, default_group, default_uid, default_gid):
def _process_pkg_filegroup(mapping_context, pkg_filegroup_info, origin):
    if hasattr(pkg_filegroup_info, "pkg_dirs"):
        for d in pkg_filegroup_info.pkg_dirs:
            _process_pkg_dirs(ctx, content_map, d[0], d[1], default_mode, default_user, default_group, default_uid, default_gid)
            _process_pkg_dirs(mapping_context, d[0], d[1])
    if hasattr(pkg_filegroup_info, "pkg_files"):
        for pf in pkg_filegroup_info.pkg_files:
            _process_pkg_files(ctx, content_map, pf[0], pf[1], default_mode, default_user, default_group, default_uid, default_gid)
            _process_pkg_files(mapping_context, pf[0], pf[1])
    if hasattr(pkg_filegroup_info, "pkg_symlinks"):
        for psl in pkg_filegroup_info.pkg_symlinks:
            _process_pkg_symlink(ctx, content_map, psl[0], psl[1], default_mode, default_user, default_group, default_uid, default_gid)
            _process_pkg_symlink(mapping_context, psl[0], psl[1])

def process_src(
        ctx,
        content_map,
        files,
        src,
        origin,
        default_mode,
        default_user,
        default_group,
        default_uid = None,
        default_gid = None):
def process_src(mapping_context, src, origin):
    """Add an entry to the content map.

    Args:
        content_map: in/out The content map
        files: in/out list of file Depsets represented in the map
        mapping_context: (r/w) a MappingContext
        src: Source Package*Info object
        origin: The rule instance adding this entry
        default_mode: fallback mode to use for Package*Info elements without mode
        default_user: fallback user to use for Package*Info elements without user
        default_group: fallback mode to use for Package*Info elements without group
        default_uid: fallback uid to use for Package*Info elements without uid
        default_gid: fallback gid to use for Package*Info elements without gid

    Returns:
        True if src was a Package*Info and added to content_map.
@@ -193,199 +254,134 @@ def process_src(
    # Gather the files for every srcs entry here, even if it is not from
    # a pkg_* rule.
    if DefaultInfo in src:
        files.append(src[DefaultInfo].files)
        mapping_context.file_deps.append(src[DefaultInfo].files)
    found_info = False
    if PackageFilesInfo in src:
        _process_pkg_files(
            ctx,
            content_map,
            mapping_context,
            src[PackageFilesInfo],
            origin,
            default_mode = default_mode,
            default_user = default_user,
            default_group = default_group,
            default_uid = default_uid,
            default_gid = default_gid,
        )
        found_info = True
    if PackageFilegroupInfo in src:
        _process_pkg_filegroup(
            ctx,
            content_map,
            mapping_context,
            src[PackageFilegroupInfo],
            origin,
            default_mode = default_mode,
            default_user = default_user,
            default_group = default_group,
            default_uid = default_uid,
            default_gid = default_gid,
        )
        found_info = True
    if PackageSymlinkInfo in src:
        _process_pkg_symlink(
            ctx,
            content_map,
            mapping_context,
            src[PackageSymlinkInfo],
            origin,
            default_mode = default_mode,
            default_user = default_user,
            default_group = default_group,
            default_uid = default_uid,
            default_gid = default_gid,
        )
        found_info = True
    if PackageDirsInfo in src:
        _process_pkg_dirs(
            ctx,
            content_map,
            mapping_context,
            src[PackageDirsInfo],
            origin,
            default_mode = "0555",
            default_user = default_user,
            default_group = default_group,
            default_uid = default_uid,
            default_gid = default_gid,
        )
        found_info = True
    return found_info

def add_directory(content_map, dir_path, origin, mode = None, user = None, group = None, uid = None, gid = None):
def add_directory(mapping_context, dir_path, origin, mode = None, user = None, group = None, uid = None, gid = None):
    """Add an empty directory to the content map.

    Args:
        content_map: The content map
        mapping_context: (r/w) a MappingContext
        dir_path: Where to place the file in the package.
        origin: The rule instance adding this entry
        mode: fallback mode to use for Package*Info elements without mode
        user: fallback user to use for Package*Info elements without user
        group: fallback mode to use for Package*Info elements without group
        uid: numeric uid
        gid: numeric gid
    """
    content_map[dir_path.strip("/")] = _DestFile(
    mapping_context.content_map[dir_path.strip("/")] = _DestFile(
        src = None,
        entry_type = ENTRY_IS_DIR,
        origin = origin,
        mode = mode,
        user = user,
        group = group,
        uid = uid,
        gid = gid,
        user = user or mapping_context.default_user,
        group = group or mapping_context.default_group,
        uid = uid or mapping_context.default_uid,
        gid = gid or mapping_context.default_gid,
    )

def add_empty_file(ctx, content_map, dest_path, origin, mode = None, user = None, group = None, uid = None, gid = None):
def add_empty_file(mapping_context, dest_path, origin, mode = None, user = None, group = None, uid = None, gid = None):
    """Add a single file to the content map.

    Args:
        ctx: rule context.
        content_map: The content map
        mapping_context: (r/w) a MappingContext
        dest_path: Where to place the file in the package.
        origin: The rule instance adding this entry
        mode: fallback mode to use for Package*Info elements without mode
        user: fallback user to use for Package*Info elements without user
        group: fallback mode to use for Package*Info elements without group
        uid: numeric uid
        gid: numeric gid
    """
    dest = dest_path.strip("/")
    _check_dest(ctx, content_map, dest, None, origin)
    content_map[dest] = _DestFile(
    _check_dest(mapping_context.content_map, dest, None, origin)
    mapping_context.content_map[dest] = _DestFile(
        src = None,
        entry_type = ENTRY_IS_EMPTY_FILE,
        origin = origin,
        mode = mode,
        user = user,
        group = group,
        uid = uid,
        gid = gid,
        user = user or mapping_context.default_user,
        group = group or mapping_context.default_group,
        uid = uid or mapping_context.default_uid,
        gid = gid or mapping_context.default_gid,
    )

def add_label_list(
        ctx,
        content_map,
        file_deps,
        srcs,
        default_mode = None,
        default_user = None,
        default_group = None,
        default_uid = None,
        default_gid = None):
def add_label_list(mapping_context, srcs):
    """Helper method to add a list of labels (typically 'srcs') to a content_map.

    Args:
        ctx: rule context.
        content_map: (r/w) The content map to update.
        file_deps: (r/w) The list of file Depsets that srcs depend on.
        mapping_context: (r/w) a MappingContext
        srcs: List of source objects.
        default_mode: fallback mode to use for Package*Info elements without mode
        default_user: fallback user to use for Package*Info elements without user
        default_group: fallback mode to use for Package*Info elements without group
        default_uid: fallback uid to use for Package*Info elements without uid
        default_gid: fallback gid to use for Package*Info elements without guid
    """

    if hasattr(ctx.attr, "include_runfiles"):
        include_runfiles = ctx.attr.include_runfiles
    else:
        include_runfiles = False

    # Compute the relative path
    data_path = compute_data_path(
        ctx,
        ctx.attr.strip_prefix if hasattr(ctx.attr, "strip_prefix") else "",
        mapping_context.label,
        mapping_context.strip_prefix,
    )
    data_path_without_prefix = compute_data_path(
        mapping_context.label,
        ".",
    )
    data_path_without_prefix = compute_data_path(ctx, ".")

    for src in srcs:
        if not process_src(
            ctx,
            content_map,
            file_deps,
            mapping_context,
            src = src,
            origin = src.label,
            default_mode = default_mode,
            default_user = default_user,
            default_group = default_group,
            default_uid = default_uid,
            default_gid = default_gid,
        ):
            # Add in the files of srcs which are not pkg_* types
            add_from_default_info(
                ctx,
                content_map,
                file_deps,
                mapping_context,
                src,
                data_path,
                data_path_without_prefix,
                default_mode = default_mode,
                default_user = default_user,
                default_group = default_group,
                default_uid = default_uid,
                default_gid = default_gid,
                include_runfiles = include_runfiles,
                include_runfiles = mapping_context.include_runfiles,
            )

def add_from_default_info(
        ctx,
        content_map,
        file_deps,
        mapping_context,
        src,
        data_path,
        data_path_without_prefix,
        default_mode = None,
        default_user = None,
        default_group = None,
        default_uid = None,
        default_gid = None,
        include_runfiles = False):
    """Helper method to add the DefaultInfo of a target to a content_map.

    Args:
        ctx: rule context.
        content_map: (r/w) The content map to update.
        file_deps: (r/w) The list of file Depsets that srcs depend on.
        mapping_context: (r/w) a MappingContext
        src: A source object.
        data_path: path to package
        data_path_without_prefix: path to the package after prefix stripping
        default_mode: fallback mode to use for Package*Info elements without mode
        default_user: fallback user to use for Package*Info elements without user
        default_group: fallback mode to use for Package*Info elements without group
        include_runfiles: Include runfiles
    """
    if not DefaultInfo in src:
@@ -398,25 +394,24 @@ def add_from_default_info(
        d_path = dest_path(f, data_path, data_path_without_prefix)
        if f.is_directory:
            add_tree_artifact(
                content_map,
                mapping_context.content_map,
                d_path,
                f,
                origin = src.label,
                mode = default_mode,
                user = default_user,
                group = default_group,
                mode = mapping_context.default_mode,
                user = mapping_context.default_user,
                group = mapping_context.default_group,
            )
        else:
            fmode = "0755" if f == the_executable else default_mode
            fmode = "0755" if f == the_executable else mapping_context.default_mode
            add_single_file(
                ctx,
                content_map,
                mapping_context,
                dest_path = d_path,
                src = f,
                origin = src.label,
                mode = fmode,
                user = default_user,
                group = default_group,
                user = mapping_context.default_user,
                group = mapping_context.default_group,
            )
    if include_runfiles:
        runfiles = src[DefaultInfo].default_runfiles
@@ -424,17 +419,17 @@ def add_from_default_info(
            base_path = d_path + ".runfiles"
            for rf in runfiles.files.to_list():
                d_path = base_path + "/" + rf.short_path
                fmode = "0755" if rf == the_executable else default_mode
                _check_dest(ctx, content_map, d_path, rf, src.label)
                content_map[d_path] = _DestFile(
                fmode = "0755" if rf == the_executable else mapping_context.default_mode
                _check_dest(mapping_context.content_map, d_path, rf, src.label, mapping_context.allow_duplicates_with_different_content)
                mapping_context.content_map[d_path] = _DestFile(
                    src = rf,
                    entry_type = ENTRY_IS_FILE,
                    origin = src.label,
                    mode = fmode,
                    user = default_user,
                    group = default_group,
                    uid = default_uid,
                    gid = default_gid,
                    user = mapping_context.default_user,
                    group = mapping_context.default_group,
                    uid = mapping_context.default_uid,
                    gid = mapping_context.default_gid,
                )

def get_my_executable(src):
@@ -466,59 +461,58 @@ def get_my_executable(src):
            return ftr.executable
    return None

def add_single_file(ctx, content_map, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None):
def add_single_file(mapping_context, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None):
    """Add an single file to the content map.

    Args:
        ctx: rule context.
        content_map: The content map
        mapping_context: the MappingContext
        dest_path: Where to place the file in the package.
        src: Source object. Must have len(src[DefaultInfo].files) == 1
        origin: The rule instance adding this entry
        mode: fallback mode to use for Package*Info elements without mode
        user: fallback user to use for Package*Info elements without user
        group: fallback mode to use for Package*Info elements without group
        uid: numeric uid
        gid: numeric gid
    """
    dest = dest_path.strip("/")
    _check_dest(ctx, content_map, dest, src, origin)
    content_map[dest] = _DestFile(
    _check_dest(mapping_context.content_map, dest, src, origin, mapping_context.allow_duplicates_with_different_content)
    mapping_context.content_map[dest] = _DestFile(
        src = src,
        entry_type = ENTRY_IS_FILE,
        origin = origin,
        mode = mode,
        user = user,
        group = group,
        uid = uid,
        gid = gid,
        user = user or mapping_context.default_user,
        group = group or mapping_context.default_group,
        uid = uid or mapping_context.default_uid,
        gid = gid or mapping_context.default_gid,
    )

def add_symlink(ctx, content_map, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None):

def add_symlink(mapping_context, dest_path, src, origin):
    """Add a symlink to the content map.

    TODO(aiuto): This is a vestage left from the pkg_tar use. We could
    converge code by having pkg_tar be a macro that expands symlinks to
    pkg_symlink targets and srcs them in.

    Args:
        ctx: rule context.
        content_map: The content map
        mapping_context: the MappingContext
        dest_path: Where to place the file in the package.
        src: Path to link to.
        origin: The rule instance adding this entry
        mode: fallback mode to use for Package*Info elements without mode
        user: fallback user to use for Package*Info elements without user
        group: fallback mode to use for Package*Info elements without group
    """
    dest = dest_path.strip("/")
    _check_dest(ctx, content_map, dest, None, origin)
    content_map[dest] = _DestFile(
    _check_dest(mapping_context.content_map, dest, None, origin)
    mapping_context.content_map[dest] = _DestFile(
        src = None,
        link_to = src,
        entry_type = ENTRY_IS_LINK,
        origin = origin,
        mode = mode,
        user = user,
        group = group,
        uid = uid,
        gid = gid,
        mode = mapping_context.default_mode,
        user = mapping_context.default_user,
        group = mapping_context.default_group,
        uid = mapping_context.default_uid,
        gid = mapping_context.default_gid,
    )

def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = None, group = None, uid = None, gid = None):
@@ -530,8 +524,10 @@ def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = N
        src: Source object. Must have len(src[DefaultInfo].files) == 1
        origin: The rule instance adding this entry
        mode: fallback mode to use for Package*Info elements without mode
        user: fallback user to use for Package*Info elements without user
        group: fallback mode to use for Package*Info elements without group
        user: User name for the entry (probably unused)
        group: group name for the entry (probably unused)
        uid: User id for the entry (probably unused)
        gid: Group id for the entry (probably unused)
    """
    content_map[dest_path] = _DestFile(
        src = src,
@@ -544,7 +540,7 @@ def add_tree_artifact(content_map, dest_path, src, origin, mode = None, user = N
        gid = gid,
    )

def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty_print=False):
def write_manifest(ctx, manifest_file, content_map, use_short_path = False, pretty_print = False):
    """Write a content map to a manifest file.

    The format of this file is currently undocumented, as it is a private
@@ -558,6 +554,7 @@ def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty
        manifest_file: File object used as the output destination
        content_map: content_map (see concepts at top of file)
        use_short_path: write out the manifest file destinations in terms of "short" paths, suitable for `bazel run`.
        pretty_print: indent the output nicely. Takes more space so it is off by default.
    """
    ctx.actions.write(
        manifest_file,
@@ -565,11 +562,11 @@ def write_manifest(ctx, manifest_file, content_map, use_short_path=False, pretty
            [
                _encode_manifest_entry(dst, content_map[dst], use_short_path, pretty_print)
                for dst in sorted(content_map.keys())
            ]
        ) + "\n]\n"
            ],
        ) + "\n]\n",
    )

def _encode_manifest_entry(dest, df, use_short_path, pretty_print=False):
def _encode_manifest_entry(dest, df, use_short_path, pretty_print = False):
    entry_type = df.entry_type if hasattr(df, "entry_type") else ENTRY_IS_FILE
    if df.src:
        src = df.src.short_path if use_short_path else df.src.path
@@ -589,11 +586,11 @@ def _encode_manifest_entry(dest, df, use_short_path, pretty_print=False):
    # Since this causes all sorts of chaos with our tests, be consistent across
    # all Bazel versions.
    origin_str = str(df.origin)
    if not origin_str.startswith('@'):
        origin_str = '@' + origin_str
    if not origin_str.startswith("@"):
        origin_str = "@" + origin_str

    data = {
        "type": df.entry_type,
        "type": entry_type,
        "src": src,
        "dest": dest.strip("/"),
        "mode": df.mode or "",
@@ -19,10 +19,10 @@ load(
    "//pkg/private:pkg_files.bzl",
    "add_directory",
    "add_empty_file",
    "add_label_list",
    "add_single_file",
    "add_symlink",
    "add_tree_artifact",
    "create_mapping_context_from_ctx",
    "process_src",
    "write_manifest",
)
@@ -59,11 +59,11 @@ def _pkg_tar_impl(ctx):
    # Files needed by rule implementation at runtime
    files = []

    outputs, output_file, output_name = setup_output_files(ctx)
    outputs, output_file, _ = setup_output_files(ctx)

    # Compute the relative path
    data_path = compute_data_path(ctx, ctx.attr.strip_prefix)
    data_path_without_prefix = compute_data_path(ctx, ".")
    data_path = compute_data_path(ctx.label, ctx.attr.strip_prefix)
    data_path_without_prefix = compute_data_path(ctx.label, ".")

    # Find a list of path remappings to apply.
    remap_paths = ctx.attr.remap_paths
@@ -114,28 +114,26 @@ def _pkg_tar_impl(ctx):
        args.add("--mtime", "portable")

    # Now we begin processing the files.
    file_deps = []  # inputs we depend on
    content_map = {}  # content handled in the manifest
    mapping_context = create_mapping_context_from_ctx(
        ctx,
        label = ctx.label,
        include_runfiles = False,  # TODO(aiuto): ctx.attr.include_runfiles,
        strip_prefix = ctx.attr.strip_prefix,
        default_mode = ctx.attr.mode,
    )

    # Start with all the pkg_* inputs
    for src in ctx.attr.srcs:
        if not process_src(
            ctx,
            content_map,
            file_deps,
            mapping_context,
            src = src,
            origin = src.label,
            default_mode = None,
            default_user = None,
            default_group = None,
            default_uid = None,
            default_gid = None,
        ):
            src_files = src[DefaultInfo].files.to_list()
            if ctx.attr.include_runfiles:
                runfiles = src[DefaultInfo].default_runfiles
                if runfiles:
                    file_deps.append(runfiles.files)
                    mapping_context.file_deps.append(runfiles.files)
                    src_files.extend(runfiles.files.to_list())

            # Add in the files of srcs which are not pkg_* types
@@ -148,19 +146,19 @@ def _pkg_tar_impl(ctx):
                # I am fine with that if it makes the code more readable.
                dest = _remap(remap_paths, d_path)
                if f.is_directory:
                    add_tree_artifact(content_map, dest, f, src.label)
                    add_tree_artifact(mapping_context.content_map, dest, f, src.label)
                else:
                    # Note: This extra remap is the bottleneck preventing this
                    # large block from being a utility method as shown below.
                    # Should we disallow mixing pkg_files in srcs with remap?
                    # I am fine with that if it makes the code more readable.
                    dest = _remap(remap_paths, d_path)
                    add_single_file(ctx, content_map, dest, f, src.label)
                    add_single_file(mapping_context, dest, f, src.label)

    # TODO(aiuto): I want the code to look like this, but we don't have lambdas.
    # transform_path = lambda f: _remap(
    #     remap_paths, dest_path(f, data_path, data_path_without_prefix))
    # add_label_list(ctx, content_map, file_deps, ctx.attr.srcs, transform_path)
    # add_label_list(mapping_context, ctx.attr.srcs, transform_path)

    # The files attribute is a map of labels to destinations. We can add them
    # directly to the content map.
@@ -168,10 +166,9 @@ def _pkg_tar_impl(ctx):
        target_files = target.files.to_list()
        if len(target_files) != 1:
            fail("Each input must describe exactly one file.", attr = "files")
        file_deps.append(depset([target_files[0]]))
        mapping_context.file_deps.append(depset([target_files[0]]))
        add_single_file(
            ctx,
            content_map,
            mapping_context,
            f_dest_path,
            target_files[0],
            target.label,
@@ -190,15 +187,14 @@ def _pkg_tar_impl(ctx):
            "%s=%s" % (_quote(key), ctx.attr.ownernames[key]),
        )
    for empty_file in ctx.attr.empty_files:
        add_empty_file(ctx, content_map, empty_file, ctx.label)
        add_empty_file(mapping_context, empty_file, ctx.label)
    for empty_dir in ctx.attr.empty_dirs or []:
        add_directory(content_map, empty_dir, ctx.label)
        add_directory(mapping_context, empty_dir, ctx.label)
    for f in ctx.files.deps:
        args.add("--tar", f.path)
    for link in ctx.attr.symlinks:
        add_symlink(
            ctx,
            content_map,
            mapping_context,
            link,
            ctx.attr.symlinks[link],
            ctx.label,
@@ -210,13 +206,16 @@ def _pkg_tar_impl(ctx):

    manifest_file = ctx.actions.declare_file(ctx.label.name + ".manifest")
    files.append(manifest_file)
    write_manifest(ctx, manifest_file, content_map)
    write_manifest(ctx, manifest_file, mapping_context.content_map)
    args.add("--manifest", manifest_file.path)

    args.set_param_file_format("flag_per_line")
    args.use_param_file("@%s", use_always = False)

    inputs = depset(direct = ctx.files.deps + files, transitive = file_deps)
    inputs = depset(
        direct = ctx.files.deps + files,
        transitive = mapping_context.file_deps,
    )

    ctx.actions.run(
        mnemonic = "PackageTar",
@@ -281,7 +280,7 @@ pkg_tar_impl = rule(
        "mtime": attr.int(default = _DEFAULT_MTIME),
        "portable_mtime": attr.bool(default = True),
        "owner": attr.string(
            doc = """Default numeric owner.group to apply to files when not set via pkg_attribures.""",
            doc = """Default numeric owner.group to apply to files when not set via pkg_attributes.""",
            default = "0.0",
        ),
        "ownername": attr.string(default = "."),
@@ -310,12 +309,12 @@ pkg_tar_impl = rule(
            providers = [PackageVariablesInfo],
        ),
        "allow_duplicates_with_different_content": attr.bool(
            default=True,
            doc="""If true, will allow you to reference multiple pkg_* which conflict
            default = True,
            doc = """If true, will allow you to reference multiple pkg_* which conflict
(writing different content or metadata to the same destination).
Such behaviour is always incorrect, but we provide a flag to support it in case old
builds were accidentally doing it. Never explicitly set this to true for new code.
"""
""",
        ),
        "stamp": attr.int(
            doc = """Enable file time stamping. Possible values:
@@ -340,6 +339,7 @@ builds were accidentally doing it. Never explicitly set this to true for new cod
    },
)

# buildifier: disable=function-docstring-args
def pkg_tar(name, **kwargs):
    """Creates a .tar file. See pkg_tar_impl.

@@ -13,26 +13,26 @@
# limitations under the License.
"""Zip archive creation rule and associated logic."""

load("//pkg:path.bzl", "compute_data_path", "dest_path")
load(
    "//pkg:providers.bzl",
    "PackageVariablesInfo",
)
load(
    "//pkg/private:pkg_files.bzl",
    "add_label_list",
    "create_mapping_context_from_ctx",
    "write_manifest",
)
load(
    "//pkg/private:util.bzl",
    "setup_output_files",
    "substitute_package_variables",
)
load(
    "//pkg/private:pkg_files.bzl",
    "add_label_list",
    "write_manifest",
)

_stamp_condition = Label("//pkg/private:private_stamp_detect")

def _pkg_zip_impl(ctx):
    outputs, output_file, output_name = setup_output_files(ctx)
    outputs, output_file, _ = setup_output_files(ctx)

    args = ctx.actions.args()
    args.add("-o", output_file.path)
@@ -47,21 +47,23 @@ def _pkg_zip_impl(ctx):
        args.add("--stamp_from", ctx.version_file.path)
        inputs.append(ctx.version_file)

    data_path = compute_data_path(ctx, ctx.attr.strip_prefix)
    data_path_without_prefix = compute_data_path(ctx, ".")

    content_map = {}  # content handled in the manifest
    file_deps = []  # list of Depsets needed by srcs
    add_label_list(ctx, content_map, file_deps, srcs = ctx.attr.srcs)
    mapping_context = create_mapping_context_from_ctx(
        ctx,
        label = ctx.label,
        include_runfiles = ctx.attr.include_runfiles,
        strip_prefix = ctx.attr.strip_prefix,
        default_mode = ctx.attr.mode,
    )
    add_label_list(mapping_context, srcs = ctx.attr.srcs)

    manifest_file = ctx.actions.declare_file(ctx.label.name + ".manifest")
    inputs.append(manifest_file)
    write_manifest(ctx, manifest_file, content_map)
    write_manifest(ctx, manifest_file, mapping_context.content_map)
    args.add("--manifest", manifest_file.path)
    args.set_param_file_format("multiline")
    args.use_param_file("@%s")

    all_inputs = depset(direct = inputs, transitive = file_deps)
    all_inputs = depset(direct = inputs, transitive = mapping_context.file_deps)

    ctx.actions.run(
        mnemonic = "PackageZip",
@@ -102,6 +104,9 @@ The name may contain variables, same as [package_file_name](#package_file_name)"
            default = "/",
        ),
        "strip_prefix": attr.string(),
        "include_runfiles": attr.bool(
            doc = """See standard attributes.""",
        ),
        "timestamp": attr.int(
            doc = """Time stamp to place on all files in the archive, expressed
as seconds since the Unix Epoch, as per RFC 3339. The default is January 01,
@@ -114,13 +119,13 @@ limited to a granularity of 2 seconds.""",
        ),
        "compression_level": attr.int(
            default = 6,
            doc = "The compression level to use, 1 is the fastest, 9 gives the smallest results. 0 skips compression, depending on the method used"
            doc = "The compression level to use, 1 is the fastest, 9 gives the smallest results. 0 skips compression, depending on the method used",
        ),
        "compression_type": attr.string(
            default = "deflated",
            doc = """The compression to use. Note that lzma and bzip2 might not be supported by all readers.
The list of compressions is the same as Python's ZipFile: https://docs.python.org/3/library/zipfile.html#zipfile.ZIP_STORED""",
            values = ["deflated", "lzma", "bzip2", "stored"]
            values = ["deflated", "lzma", "bzip2", "stored"],
        ),

        # Common attributes
@@ -141,14 +146,13 @@ The list of compressions is the same as Python's ZipFile: https://docs.python.or
""",
            default = 0,
        ),

        "allow_duplicates_with_different_content": attr.bool(
            default=True,
            doc="""If true, will allow you to reference multiple pkg_* which conflict
            default = True,
            doc = """If true, will allow you to reference multiple pkg_* which conflict
(writing different content or metadata to the same destination).
Such behaviour is always incorrect, but we provide a flag to support it in case old
builds were accidentally doing it. Never explicitly set this to true for new code.
"""
""",
        ),
        # Is --stamp set on the command line?
        # TODO(https://github.com/bazelbuild/rules_pkg/issues/340): Remove this.
@@ -170,7 +174,9 @@ def pkg_zip(name, out = None, **kwargs):
    @wraps(pkg_zip_impl)

    Args:
        out: output file name. Default: name + ".zip".
        name: name
        out: output file name. Default: name + ".zip".
        **kwargs: the rest
    """
    if not out:
        out = name + ".zip"
@@ -14,16 +14,15 @@

"""Tests for path.bzl"""

load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
load("//pkg:mappings.bzl", "pkg_mkdirs")
load("//pkg:path.bzl", "compute_data_path")
load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts", "unittest")

##########
# Test compute_data_path
##########
def _compute_data_path_test_impl(ctx):
    env = analysistest.begin(ctx)
    target_under_test = analysistest.target_under_test(env)

    # Subtle: This allows you to vendor the library into your own repo at some
    # arbitrary path.
@@ -33,7 +32,7 @@ def _compute_data_path_test_impl(ctx):
    asserts.equals(
        env,
        expect,
        compute_data_path(ctx, ctx.attr.in_path),
        compute_data_path(ctx.label, ctx.attr.in_path),
    )
    return analysistest.end(env)
@@ -13,9 +13,9 @@
# limitations under the License.
"""Rules to aid testing"""

load("//pkg/private:pkg_files.bzl", "add_label_list", "write_manifest")
load("//pkg:providers.bzl", "PackageFilegroupInfo", "PackageSymlinkInfo")
load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
load("//pkg:providers.bzl", "PackageFilegroupInfo", "PackageSymlinkInfo")
load("//pkg/private:pkg_files.bzl", "add_label_list", "create_mapping_context_from_ctx", "write_manifest")  # buildifier: disable=bzl-visibility

def _directory_impl(ctx):
    out_dir_file = ctx.actions.declare_directory(ctx.attr.outdir or ctx.attr.name)
@@ -30,7 +30,7 @@ def _directory_impl(ctx):

    for link, target in ctx.attr.links.items():
        args.add(link)
        args.add('@@' + target)
        args.add("@@" + target)

    ctx.actions.run(
        outputs = [out_dir_file],
@@ -113,13 +113,16 @@ cc_binary in complexity, but does not depend on a large toolchain.""",
def _link_tree_impl(ctx):
    links = []
    prefix = ctx.attr.package_dir or ""
    if prefix and not prefix.endswith('/'):
    if prefix and not prefix.endswith("/"):
        prefix = prefix + "/"
    for link, target in ctx.attr.links.items():
        # DBG print(' %s -> %s ' % (link, target))
        links.append(
            (PackageSymlinkInfo(destination = prefix + link, target = target),
             ctx.label))
            (
                PackageSymlinkInfo(destination = prefix + link, target = target),
                ctx.label,
            ),
        )
    return [PackageFilegroupInfo(pkg_symlinks = links)]

link_tree = rule(
@@ -142,10 +145,9 @@ The keys of links are paths to create. The values are the target of the links."
)

def _write_content_manifest_impl(ctx):
    content_map = {}  # content handled in the manifest
    file_deps = []  # inputs we depend on
    add_label_list(ctx, content_map, file_deps, ctx.attr.srcs)
    write_manifest(ctx, ctx.outputs.out, content_map, use_short_path = ctx.attr.use_short_path)
    mapping_context = create_mapping_context_from_ctx(ctx, ctx.label)
    add_label_list(mapping_context, ctx.attr.srcs)
    write_manifest(ctx, ctx.outputs.out, mapping_context.content_map, use_short_path = ctx.attr.use_short_path)

_write_content_manifest = rule(
    doc = """Helper rule to write the content manifest for a pkg_*.
@@ -181,7 +183,7 @@ def write_content_manifest(name, srcs, **kwargs):
        srcs = srcs,
        out = out,
        use_short_path = use_short_path,
        **kwargs,
        **kwargs
    )

############################################################