Format code using yapf

Author: Eloston
Date: 2018-07-29 07:24:29 +00:00
parent 2bcee6553d
commit 4d527713d1
17 changed files with 335 additions and 229 deletions

.style.yapf (new file)

@@ -0,0 +1,8 @@
[style]
based_on_style = pep8
allow_split_before_dict_value = false
coalesce_brackets = true
column_limit = 100
indent_width = 4
join_multiple_lines = true
spaces_before_comment = 1
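These eight lines drive every hunk below: based_on_style = pep8 starts from yapf's built-in PEP 8 profile, column_limit = 100 raises the wrap point from 79 to 100 columns, join_multiple_lines allows short statements to be rejoined onto one line, coalesce_brackets keeps closing brackets attached to the last element rather than on their own line, and allow_split_before_dict_value = false keeps dict values on the same line as their keys. The net effect is that calls fitting within 100 columns collapse to a single line, while overflowing argument lists split to one argument per line, as in this before/after pair taken from the CLI hunks of this commit:

# Before (hand-wrapped under the old style):
parser.add_argument(
    '-c', '--cache', type=Path, required=True,
    help='Path to the directory to cache downloads.')

# After yapf with this configuration:
parser.add_argument(
    '-c',
    '--cache',
    type=Path,
    required=True,
    help='Path to the directory to cache downloads.')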


@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
CLI entry point when invoking the module directly


@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
buildkit: A small helper utility for building ungoogled-chromium.
@@ -24,9 +23,11 @@ from .extraction import prune_dir
# Classes
class _CLIError(RuntimeError):
"""Custom exception for printing argument parser errors from callbacks"""
class NewBundleAction(argparse.Action): #pylint: disable=too-few-public-methods
"""argparse.ArgumentParser action handler with more verbose logging"""
@@ -46,35 +47,50 @@ class NewBundleAction(argparse.Action): #pylint: disable=too-few-public-methods
parser.exit(status=1)
setattr(namespace, self.dest, bundle)
# Methods
def setup_bundle_arg(parser):
"""Helper to add an argparse.ArgumentParser argument for a config bundle"""
parser.add_argument(
- '-b', '--bundle', metavar='PATH', dest='bundle', required=True, action=NewBundleAction,
+ '-b',
+ '--bundle',
+ metavar='PATH',
+ dest='bundle',
+ required=True,
+ action=NewBundleAction,
help='Path to the bundle. Dependencies must reside next to the bundle.')
def _add_downloads(subparsers):
"""Retrieve, check, and unpack downloads"""
def _add_common_args(parser):
setup_bundle_arg(parser)
parser.add_argument(
- '-c', '--cache', type=Path, required=True,
+ '-c',
+ '--cache',
+ type=Path,
+ required=True,
help='Path to the directory to cache downloads.')
def _retrieve_callback(args):
- downloads.retrieve_downloads(
- args.bundle, args.cache, args.show_progress, args.disable_ssl_verification)
+ downloads.retrieve_downloads(args.bundle, args.cache, args.show_progress,
+ args.disable_ssl_verification)
try:
downloads.check_downloads(args.bundle, args.cache)
except downloads.HashMismatchError as exc:
get_logger().error('File checksum does not match: %s', exc)
raise _CLIError()
def _unpack_callback(args):
extractors = {
ExtractorEnum.SEVENZIP: args.sevenz_path,
ExtractorEnum.TAR: args.tar_path,
}
downloads.unpack_downloads(args.bundle, args.cache, args.output, extractors)
# downloads
parser = subparsers.add_parser(
'downloads', help=_add_downloads.__doc__ + '.', description=_add_downloads.__doc__)
@@ -83,36 +99,45 @@ def _add_downloads(subparsers):
# downloads retrieve
retrieve_parser = subsubparsers.add_parser(
- 'retrieve', help='Retrieve and check download files',
+ 'retrieve',
+ help='Retrieve and check download files',
description='Retrieves and checks downloads without unpacking.')
_add_common_args(retrieve_parser)
retrieve_parser.add_argument(
- '--hide-progress-bar', action='store_false', dest='show_progress',
+ '--hide-progress-bar',
+ action='store_false',
+ dest='show_progress',
help='Hide the download progress.')
retrieve_parser.add_argument(
- '--disable-ssl-verification', action='store_true',
+ '--disable-ssl-verification',
+ action='store_true',
help='Disables certificate verification for downloads using HTTPS.')
retrieve_parser.set_defaults(callback=_retrieve_callback)
# downloads unpack
unpack_parser = subsubparsers.add_parser(
- 'unpack', help='Unpack download files',
+ 'unpack',
+ help='Unpack download files',
description='Verifies hashes of and unpacks download files into the specified directory.')
_add_common_args(unpack_parser)
unpack_parser.add_argument(
- '--tar-path', default='tar',
+ '--tar-path',
+ default='tar',
help=('(Linux and macOS only) Command or path to the BSD or GNU tar '
'binary for extraction. Default: %(default)s'))
unpack_parser.add_argument(
- '--7z-path', dest='sevenz_path', default=SEVENZIP_USE_REGISTRY,
+ '--7z-path',
+ dest='sevenz_path',
+ default=SEVENZIP_USE_REGISTRY,
help=('Command or path to 7-Zip\'s "7z" binary. If "_use_registry" is '
'specified, determine the path from the registry. Default: %(default)s'))
- unpack_parser.add_argument(
- 'output', type=Path, help='The directory to unpack to.')
+ unpack_parser.add_argument('output', type=Path, help='The directory to unpack to.')
unpack_parser.set_defaults(callback=_unpack_callback)
def _add_prune(subparsers):
"""Prunes binaries in the given path."""
def _callback(args):
if not args.directory.exists():
get_logger().error('Specified directory does not exist: %s', args.directory)
@@ -121,15 +146,16 @@ def _add_prune(subparsers):
if unremovable_files:
get_logger().error('Files could not be pruned: %s', unremovable_files)
raise _CLIError()
- parser = subparsers.add_parser(
- 'prune', help=_add_prune.__doc__, description=_add_prune.__doc__)
+ parser = subparsers.add_parser('prune', help=_add_prune.__doc__, description=_add_prune.__doc__)
setup_bundle_arg(parser)
- parser.add_argument(
- 'directory', type=Path, help='The directory to apply binary pruning.')
+ parser.add_argument('directory', type=Path, help='The directory to apply binary pruning.')
parser.set_defaults(callback=_callback)
def _add_domains(subparsers):
"""Operations with domain substitution"""
def _callback(args):
try:
if args.reverting:
@@ -148,6 +174,7 @@ def _add_domains(subparsers):
except KeyError as exc:
get_logger().error('%s', exc)
raise _CLIError()
# domains
parser = subparsers.add_parser(
'domains', help=_add_domains.__doc__, description=_add_domains.__doc__)
@@ -158,39 +185,49 @@ def _add_domains(subparsers):
# domains apply
apply_parser = subsubparsers.add_parser(
- 'apply', help='Apply domain substitution',
+ 'apply',
+ help='Apply domain substitution',
description='Applies domain substitution and creates the domain substitution cache.')
setup_bundle_arg(apply_parser)
apply_parser.add_argument(
- '-c', '--cache', type=Path, required=True,
+ '-c',
+ '--cache',
+ type=Path,
+ required=True,
help='The path to the domain substitution cache. The path must not already exist.')
apply_parser.add_argument(
- 'directory', type=Path,
- help='The directory to apply domain substitution')
+ 'directory', type=Path, help='The directory to apply domain substitution')
apply_parser.set_defaults(reverting=False)
# domains revert
revert_parser = subsubparsers.add_parser(
- 'revert', help='Revert domain substitution',
+ 'revert',
+ help='Revert domain substitution',
description='Reverts domain substitution based only on the domain substitution cache.')
revert_parser.add_argument(
- 'directory', type=Path,
- help='The directory to reverse domain substitution')
+ 'directory', type=Path, help='The directory to reverse domain substitution')
revert_parser.add_argument(
- '-c', '--cache', type=Path, required=True,
+ '-c',
+ '--cache',
+ type=Path,
+ required=True,
help=('The path to the domain substitution cache. '
'The path must exist and will be removed if successful.'))
revert_parser.set_defaults(reverting=True)
def _add_patches(subparsers):
"""Operations with patches"""
def _export_callback(args):
patches.export_patches(args.bundle, args.output)
def _apply_callback(args):
patches.apply_patches(
patches.patch_paths_by_bundle(args.bundle),
args.directory,
patch_bin_path=args.patch_bin)
# patches
parser = subparsers.add_parser(
'patches', help=_add_patches.__doc__, description=_add_patches.__doc__)
@@ -199,11 +236,13 @@ def _add_patches(subparsers):
# patches export
export_parser = subsubparsers.add_parser(
- 'export', help='Export patches in GNU quilt-compatible format',
+ 'export',
+ help='Export patches in GNU quilt-compatible format',
description='Export a config bundle\'s patches to a quilt-compatible format')
setup_bundle_arg(export_parser)
export_parser.add_argument(
- 'output', type=Path,
+ 'output',
+ type=Path,
help='The directory to write to. It must either be empty or not exist.')
export_parser.set_defaults(callback=_export_callback)
@@ -216,10 +255,13 @@ def _add_patches(subparsers):
apply_parser.add_argument('directory', type=Path, help='The source tree to apply patches.')
apply_parser.set_defaults(callback=_apply_callback)
def _add_gnargs(subparsers):
"""Operations with GN arguments"""
def _print_callback(args):
print(str(args.bundle.gn_flags), end='')
# gnargs
parser = subparsers.add_parser(
'gnargs', help=_add_gnargs.__doc__, description=_add_gnargs.__doc__)
@@ -227,15 +269,17 @@ def _add_gnargs(subparsers):
# gnargs print
print_parser = subsubparsers.add_parser(
- 'print', help='Prints GN args in args.gn format',
+ 'print',
+ help='Prints GN args in args.gn format',
description='Prints a list of GN args in args.gn format to standard output')
setup_bundle_arg(print_parser)
print_parser.set_defaults(callback=_print_callback)
def main(arg_list=None):
"""CLI entry point"""
- parser = argparse.ArgumentParser(description=__doc__,
- formatter_class=argparse.RawTextHelpFormatter)
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
subparsers = parser.add_subparsers(title='Available commands', dest='command')
subparsers.required = True # Workaround for http://bugs.python.org/issue9253#msg186387


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common code and constants"""
import configparser
@@ -25,19 +24,24 @@ _ENV_FORMAT = "BUILDKIT_{}"
# Helpers for third_party.schema
def schema_dictcast(data):
"""Cast data to dictionary for third_party.schema and configparser data structures"""
return schema.And(schema.Use(dict), data)
def schema_inisections(data):
"""Cast configparser data structure to dict and remove DEFAULT section"""
return schema_dictcast({configparser.DEFAULTSECT: object, **data})
# Public classes
class BuildkitError(Exception):
"""Represents a generic custom error from buildkit"""
class BuildkitAbort(BuildkitError):
"""
Exception thrown when all details have been logged and buildkit aborts.
@@ -45,20 +49,24 @@ class BuildkitAbort(BuildkitError):
It should only be caught by the user of buildkit's library interface.
"""
class PlatformEnum(enum.Enum):
"""Enum for platforms that need distinction for certain functionality"""
UNIX = 'unix' # Currently covers anything that isn't Windows
WINDOWS = 'windows'
class ExtractorEnum: #pylint: disable=too-few-public-methods
"""Enum for extraction binaries"""
SEVENZIP = '7z'
TAR = 'tar'
# Public methods
- def get_logger(name=__package__, initial_level=logging.DEBUG,
- prepend_timestamp=True, log_init=True):
+ def get_logger(name=__package__, initial_level=logging.DEBUG, prepend_timestamp=True,
+ log_init=True):
'''Gets the named logger'''
logger = logging.getLogger(name)
@@ -84,6 +92,7 @@ def get_logger(name=__package__, initial_level=logging.DEBUG,
logger.debug("Initialized logger '%s'", name)
return logger
def dir_empty(path):
"""
Returns True if the directory is empty; False otherwise
@@ -96,6 +105,7 @@ def dir_empty(path):
return True
return False
def ensure_empty_dir(path, parents=False):
"""
Makes a directory at path if it doesn't exist. If it exists, check if it is empty.
@@ -111,6 +121,7 @@ def ensure_empty_dir(path, parents=False):
if not dir_empty(path):
raise exc
def get_running_platform():
"""
Returns a PlatformEnum value indicating the platform that buildkit is running on.
@@ -124,18 +135,19 @@ def get_running_platform():
# Only Windows and UNIX-based platforms need to be distinguished right now.
return PlatformEnum.UNIX
def _read_version_ini():
- version_schema = schema.Schema(schema_inisections({
- 'version': schema_dictcast({
- 'chromium_version': schema.And(str, len),
- 'release_revision': schema.And(str, len),
- schema.Optional('release_extra'): schema.And(str, len),
- })
- }))
+ version_schema = schema.Schema(
+ schema_inisections({
+ 'version': schema_dictcast({
+ 'chromium_version': schema.And(str, len),
+ 'release_revision': schema.And(str, len),
+ schema.Optional('release_extra'): schema.And(str, len),
+ })
+ }))
version_parser = configparser.ConfigParser()
version_parser.read(
- str(Path(__file__).absolute().parent.parent / 'version.ini'),
- encoding=ENCODING)
+ str(Path(__file__).absolute().parent.parent / 'version.ini'), encoding=ENCODING)
try:
version_schema.validate(version_parser)
except schema.SchemaError as exc:
@@ -143,20 +155,24 @@ def _read_version_ini():
raise exc
return version_parser
def get_chromium_version():
"""Returns the Chromium version."""
return _VERSION_INI['version']['chromium_version']
def get_release_revision():
"""Returns the release revision."""
return _VERSION_INI['version']['release_revision']
def get_release_extra(fallback=None):
"""
Return the release revision extra info, or returns fallback if it is not defined.
"""
return _VERSION_INI['version'].get('release_extra', fallback=fallback)
def get_version_string():
"""
Returns a version string containing all information in a Debian-like format.
@@ -167,4 +183,5 @@ def get_version_string():
result += '~{}'.format(release_extra)
return result
_VERSION_INI = _read_version_ini()


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Build configuration generation implementation
"""
@@ -16,16 +15,17 @@ import io
import re
from pathlib import Path
- from .common import (
- ENCODING, BuildkitError, ExtractorEnum, get_logger, get_chromium_version)
+ from .common import (ENCODING, BuildkitError, ExtractorEnum, get_logger, get_chromium_version)
from .downloads import HashesURLEnum
from .third_party import schema
# Classes
class BuildkitConfigError(BuildkitError):
"""Exception class for the config module"""
class _ConfigFile(abc.ABC): #pylint: disable=too-few-public-methods
"""
Base config file class
@@ -66,6 +66,7 @@ class _ConfigFile(abc.ABC): #pylint: disable=too-few-public-methods
def __str__(self):
"""String contents of the config file"""
class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
"""
Base class for INI config files
@@ -82,13 +83,14 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
Raises schema.SchemaError if validation fails
"""
def _section_generator(data):
for section in data:
if section == configparser.DEFAULTSECT:
continue
- yield section, dict(filter(
- lambda x: x[0] not in self._ini_vars,
- data.items(section)))
+ yield section, dict(
+ filter(lambda x: x[0] not in self._ini_vars, data.items(section)))
new_data = configparser.ConfigParser(defaults=self._ini_vars)
with path.open(encoding=ENCODING) as ini_file:
new_data.read_file(ini_file, source=str(path))
@@ -97,8 +99,8 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
try:
self._schema.validate(dict(_section_generator(new_data)))
except schema.SchemaError as exc:
- get_logger().error(
- 'INI file for %s failed schema validation: %s', type(self).__name__, path)
+ get_logger().error('INI file for %s failed schema validation: %s',
+ type(self).__name__, path)
raise exc
return new_data
@@ -138,6 +140,7 @@ class _IniConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
"""Returns an iterator over the section names"""
return iter(self._data.sections())
class ListConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
"""
Represents a simple newline-delimited list
@@ -165,6 +168,7 @@ class ListConfigFile(_ConfigFile): #pylint: disable=too-few-public-methods
"""Returns an iterator over the list items"""
return iter(self._data)
class MapConfigFile(_ConfigFile):
"""Represents a simple string-keyed and string-valued dictionary"""
@@ -178,8 +182,7 @@ class MapConfigFile(_ConfigFile):
key, value = line.split('=')
if key in new_data:
raise ValueError(
- 'Map file "%s" contains key "%s" at least twice.' %
- (path, key))
+ 'Map file "%s" contains key "%s" at least twice.' % (path, key))
new_data[key] = value
return new_data
@@ -218,6 +221,7 @@ class MapConfigFile(_ConfigFile):
"""
return self._data.items()
class BundleMetaIni(_IniConfigFile):
"""Represents bundlemeta.ini files"""
@@ -245,6 +249,7 @@ class BundleMetaIni(_IniConfigFile):
return [x.strip() for x in self['bundle']['depends'].split(',')]
return tuple()
class DomainRegexList(ListConfigFile):
"""Representation of a domain_regex_list file"""
_regex_pair_tuple = collections.namedtuple('DomainRegexPair', ('pattern', 'replacement'))
@@ -278,15 +283,18 @@ class DomainRegexList(ListConfigFile):
"""
Returns a single expression to search for domains
"""
- return re.compile('|'.join(
- map(lambda x: x.split(self._PATTERN_REPLACE_DELIM, 1)[0], self)))
+ return re.compile('|'.join(map(lambda x: x.split(self._PATTERN_REPLACE_DELIM, 1)[0], self)))
class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
"""Representation of an downloads.ini file"""
_hashes = ('md5', 'sha1', 'sha256', 'sha512')
_nonempty_keys = ('url', 'download_filename')
- _optional_keys = ('version', 'strip_leading_dirs',)
+ _optional_keys = (
+ 'version',
+ 'strip_leading_dirs',
+ )
_passthrough_properties = (*_nonempty_keys, *_optional_keys, 'extractor')
_ini_vars = {
'_chromium_version': get_chromium_version(),
@@ -294,9 +302,11 @@ class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
_schema = schema.Schema({
schema.Optional(schema.And(str, len)): {
- **{x: schema.And(str, len) for x in _nonempty_keys},
+ **{x: schema.And(str, len)
+ for x in _nonempty_keys},
'output_path': (lambda x: str(Path(x).relative_to(''))),
- **{schema.Optional(x): schema.And(str, len) for x in _optional_keys},
+ **{schema.Optional(x): schema.And(str, len)
+ for x in _optional_keys},
schema.Optional('extractor'): schema.Or(ExtractorEnum.TAR, ExtractorEnum.SEVENZIP),
schema.Optional(schema.Or(*_hashes)): schema.And(str, len),
schema.Optional('hash_url'): (
@@ -329,17 +339,16 @@ class DownloadsIni(_IniConfigFile): #pylint: disable=too-few-public-methods
hashes_dict[hash_name] = value
return hashes_dict
else:
- raise AttributeError(
- '"{}" has no attribute "{}"'.format(type(self).__name__, name))
+ raise AttributeError('"{}" has no attribute "{}"'.format(type(self).__name__, name))
def __getitem__(self, section):
"""
Returns an object with keys as attributes and
values already pre-processed strings
"""
- return self._DownloadsProperties(
- self._data[section], self._passthrough_properties,
- self._hashes)
+ return self._DownloadsProperties(self._data[section], self._passthrough_properties,
+ self._hashes)
class ConfigBundle: #pylint: disable=too-few-public-methods
"""Config bundle implementation"""
@@ -409,8 +418,7 @@ class ConfigBundle: #pylint: disable=too-few-public-methods
if name in self._ATTR_MAPPING:
return self.files[self._ATTR_MAPPING[name]]
else:
- raise AttributeError(
- '%s has no attribute "%s"' % type(self).__name__, name)
+ raise AttributeError('%s has no attribute "%s"' % type(self).__name__, name)
def rebase(self, other):
"""Rebase the current bundle onto other, saving changes into self"""


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Module for substituting domain names in the source tree with blockable strings.
"""
@@ -28,6 +27,7 @@ _ORIG_DIR = 'orig'
# Private Methods
def _substitute_path(path, regex_iter):
"""
Perform domain substitution on path and add it to the domain substitution cache.
@@ -58,8 +58,7 @@ def _substitute_path(path, regex_iter):
raise UnicodeDecodeError('Unable to decode with any encoding: %s' % path)
file_subs = 0
for regex_pair in regex_iter:
- content, sub_count = regex_pair.pattern.subn(
- regex_pair.replacement, content)
+ content, sub_count = regex_pair.pattern.subn(regex_pair.replacement, content)
file_subs += sub_count
if file_subs > 0:
substituted_content = content.encode(encoding)
@@ -69,6 +68,7 @@ def _substitute_path(path, regex_iter):
return (zlib.crc32(substituted_content), original_content)
return (None, None)
def _validate_file_index(index_file, resolved_tree, cache_index_files):
"""
Validation of file index and hashes against the source tree.
@@ -85,31 +85,30 @@ def _validate_file_index(index_file, resolved_tree, cache_index_files):
get_logger().error('Could not split entry "%s": %s', entry, exc)
continue
if not relative_path or not file_hash:
- get_logger().error(
- 'Entry %s of domain substitution cache file index is not valid',
- _INDEX_HASH_DELIMITER.join((relative_path, file_hash)))
+ get_logger().error('Entry %s of domain substitution cache file index is not valid',
+ _INDEX_HASH_DELIMITER.join((relative_path, file_hash)))
all_hashes_valid = False
continue
if not crc32_regex.match(file_hash):
- get_logger().error(
- 'File index hash for %s does not appear to be a CRC32 hash', relative_path)
+ get_logger().error('File index hash for %s does not appear to be a CRC32 hash',
+ relative_path)
all_hashes_valid = False
continue
if zlib.crc32((resolved_tree / relative_path).read_bytes()) != int(file_hash, 16):
- get_logger().error(
- 'Hashes do not match for: %s', relative_path)
+ get_logger().error('Hashes do not match for: %s', relative_path)
all_hashes_valid = False
continue
if relative_path in cache_index_files:
- get_logger().error(
- 'File %s shows up at least twice in the file index', relative_path)
+ get_logger().error('File %s shows up at least twice in the file index', relative_path)
all_hashes_valid = False
continue
cache_index_files.add(relative_path)
return all_hashes_valid
# Public Methods
def apply_substitution(config_bundle, source_tree, domainsub_cache):
"""
Substitute domains in source_tree with files and substitutions from config_bundle,
@@ -132,8 +131,9 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
resolved_tree = source_tree.resolve()
regex_pairs = config_bundle.domain_regex.get_pairs()
fileindex_content = io.BytesIO()
- with tarfile.open(str(domainsub_cache),
- 'w:%s' % domainsub_cache.suffix[1:], compresslevel=1) as cache_tar:
+ with tarfile.open(
+ str(domainsub_cache), 'w:%s' % domainsub_cache.suffix[1:],
+ compresslevel=1) as cache_tar:
orig_dir = Path(_ORIG_DIR)
for relative_path in config_bundle.domain_substitution:
if _INDEX_HASH_DELIMITER in relative_path:
@@ -141,8 +141,8 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
cache_tar.close()
domainsub_cache.unlink()
raise ValueError(
- 'Path "%s" contains the file index hash delimiter "%s"' %
- relative_path, _INDEX_HASH_DELIMITER)
+ 'Path "%s" contains the file index hash delimiter "%s"' % relative_path,
+ _INDEX_HASH_DELIMITER)
path = resolved_tree / relative_path
if not path.exists():
get_logger().warning('Skipping non-existent path: %s', path)
@@ -150,8 +150,8 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
if crc32_hash is None:
get_logger().info('Path has no substitutions: %s', relative_path)
continue
- fileindex_content.write('{}{}{:08x}\n'.format(
- relative_path, _INDEX_HASH_DELIMITER, crc32_hash).encode(ENCODING))
+ fileindex_content.write('{}{}{:08x}\n'.format(relative_path, _INDEX_HASH_DELIMITER,
+ crc32_hash).encode(ENCODING))
orig_tarinfo = tarfile.TarInfo(str(orig_dir / relative_path))
orig_tarinfo.size = len(orig_content)
with io.BytesIO(orig_content) as orig_file:
@@ -161,6 +161,7 @@ def apply_substitution(config_bundle, source_tree, domainsub_cache):
fileindex_content.seek(0)
cache_tar.addfile(fileindex_tarinfo, fileindex_content)
def revert_substitution(domainsub_cache, source_tree):
"""
Revert domain substitution on source_tree using the pre-domain
@@ -196,8 +197,8 @@ def revert_substitution(domainsub_cache, source_tree):
cache_index_files = set() # All files in the file index
- with tempfile.TemporaryDirectory(prefix='domsubcache_files',
- dir=str(resolved_tree)) as tmp_extract_name:
+ with tempfile.TemporaryDirectory(
+ prefix='domsubcache_files', dir=str(resolved_tree)) as tmp_extract_name:
extract_path = Path(tmp_extract_name)
get_logger().debug('Extracting domain substitution cache...')
extract_tar_file(domainsub_cache, extract_path, Path())
@@ -206,9 +207,8 @@ def revert_substitution(domainsub_cache, source_tree):
get_logger().debug('Validating substituted files in source tree...')
with (extract_path / _INDEX_LIST).open('rb') as index_file: #pylint: disable=no-member
if not _validate_file_index(index_file, resolved_tree, cache_index_files):
- raise KeyError(
- 'Domain substitution cache file index is corrupt or hashes mismatch '
- 'the source tree.')
+ raise KeyError('Domain substitution cache file index is corrupt or hashes mismatch '
+ 'the source tree.')
# Move original files over substituted ones
get_logger().debug('Moving original files over substituted ones...')


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Module for the downloading, checking, and unpacking of necessary files into the source tree
"""
@@ -18,18 +17,23 @@ from .extraction import extract_tar_file, extract_with_7z
# Constants
class HashesURLEnum(str, enum.Enum):
"""Enum for supported hash URL schemes"""
chromium = 'chromium'
# Custom Exceptions
class HashMismatchError(BuildkitError):
"""Exception for computed hashes not matching expected hashes"""
pass
class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
"""Hook for urllib.request.urlretrieve to log progress information to console"""
def __init__(self):
self._max_len_printed = 0
self._last_percentage = None
@@ -48,6 +52,7 @@ class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
self._max_len_printed = len(status_line)
print('\r' + status_line, end='')
def _download_if_needed(file_path, url, show_progress):
"""
Downloads a file from url to the specified path file_path if necessary.
@@ -65,6 +70,7 @@ def _download_if_needed(file_path, url, show_progress):
if show_progress:
print()
def _chromium_hashes_generator(hashes_path):
with hashes_path.open(encoding=ENCODING) as hashes_file:
hash_lines = hashes_file.read().splitlines()
@@ -74,10 +80,12 @@ def _chromium_hashes_generator(hashes_path):
else:
get_logger().warning('Skipping unknown hash algorithm: %s', hash_name)
def _downloads_iter(config_bundle):
"""Iterator for the downloads ordered by output path"""
return sorted(config_bundle.downloads, key=(lambda x: str(Path(x.output_path))))
def _get_hash_pairs(download_properties, cache_dir):
"""Generator of (hash_name, hash_hex) for the given download"""
for entry_type, entry_value in download_properties.hashes.items():
@@ -90,6 +98,7 @@ def _get_hash_pairs(download_properties, cache_dir):
else:
yield entry_type, entry_value
def retrieve_downloads(config_bundle, cache_dir, show_progress, disable_ssl_verification=False):
"""
Retrieve downloads into the downloads cache.
@@ -128,6 +137,7 @@ def retrieve_downloads(config_bundle, cache_dir, show_progress, disable_ssl_veri
if disable_ssl_verification:
ssl._create_default_https_context = orig_https_context #pylint: disable=protected-access
def check_downloads(config_bundle, cache_dir):
"""
Check integrity of the downloads cache.
@@ -149,6 +159,7 @@ def check_downloads(config_bundle, cache_dir):
if not hasher.hexdigest().lower() == hash_hex.lower():
raise HashMismatchError(download_path)
def unpack_downloads(config_bundle, cache_dir, output_dir, extractors=None):
"""
Unpack downloads in the downloads cache to output_dir. Assumes all downloads are retrieved.
@@ -180,6 +191,8 @@ def unpack_downloads(config_bundle, cache_dir, output_dir, extractors=None):
strip_leading_dirs_path = Path(download_properties.strip_leading_dirs)
extractor_func(
- archive_path=download_path, output_dir=output_dir,
+ archive_path=download_path,
+ output_dir=output_dir,
unpack_dir=Path(download_properties.output_path),
- relative_to=strip_leading_dirs_path, extractors=extractors)
+ relative_to=strip_leading_dirs_path,
+ extractors=extractors)


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Archive extraction utilities
"""
@@ -14,15 +13,15 @@ import subprocess
import tarfile
from pathlib import Path, PurePosixPath
- from .common import (
- SEVENZIP_USE_REGISTRY, BuildkitAbort, PlatformEnum, ExtractorEnum, get_logger,
- get_running_platform)
+ from .common import (SEVENZIP_USE_REGISTRY, BuildkitAbort, PlatformEnum, ExtractorEnum, get_logger,
+ get_running_platform)
DEFAULT_EXTRACTORS = {
ExtractorEnum.SEVENZIP: SEVENZIP_USE_REGISTRY,
ExtractorEnum.TAR: 'tar',
}
def _find_7z_by_registry():
"""
Return a string to 7-zip's 7z.exe from the Windows Registry.
@@ -42,6 +41,7 @@ def _find_7z_by_registry():
get_logger().error('7z.exe not found at path from registry: %s', sevenzip_path)
return sevenzip_path
def _find_extractor_by_cmd(extractor_cmd):
"""Returns a string path to the binary; None if it couldn't be found"""
if not extractor_cmd:
@@ -50,6 +50,7 @@ def _find_extractor_by_cmd(extractor_cmd):
return extractor_cmd
return shutil.which(extractor_cmd)
def _process_relative_to(unpack_root, relative_to):
"""
For an extractor that doesn't support an automatic transform, move the extracted
@@ -57,14 +58,15 @@ def _process_relative_to(unpack_root, relative_to):
"""
relative_root = unpack_root / relative_to
if not relative_root.is_dir():
- get_logger().error(
- 'Could not find relative_to directory in extracted files: %s', relative_to)
+ get_logger().error('Could not find relative_to directory in extracted files: %s',
+ relative_to)
raise BuildkitAbort()
for src_path in relative_root.iterdir():
dest_path = unpack_root / src_path.name
src_path.rename(dest_path)
relative_root.rmdir()
def prune_dir(unpack_root, ignore_files):
"""
Delete files under unpack_root listed in ignore_files. Returns an iterable of unremovable files.
@@ -81,16 +83,16 @@ def prune_dir(unpack_root, ignore_files):
unremovable_files.add(Path(relative_file).as_posix())
return unremovable_files
def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
get_logger().debug('Using 7-zip extractor')
if not relative_to is None and (output_dir / relative_to).exists():
- get_logger().error(
- 'Temporary unpacking directory already exists: %s', output_dir / relative_to)
+ get_logger().error('Temporary unpacking directory already exists: %s',
+ output_dir / relative_to)
raise BuildkitAbort()
cmd1 = (binary, 'x', str(archive_path), '-so')
cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', '-o{}'.format(str(output_dir)))
- get_logger().debug('7z command line: %s | %s',
- ' '.join(cmd1), ' '.join(cmd2))
+ get_logger().debug('7z command line: %s | %s', ' '.join(cmd1), ' '.join(cmd2))
proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE)
@@ -105,6 +107,7 @@ def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
if not relative_to is None:
_process_relative_to(output_dir, relative_to)
def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
get_logger().debug('Using BSD or GNU tar extractor')
output_dir.mkdir(exist_ok=True)
@@ -120,10 +123,13 @@ def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
if not relative_to is None:
_process_relative_to(output_dir, relative_to)
def _extract_tar_with_python(archive_path, output_dir, relative_to):
get_logger().debug('Using pure Python tar extractor')
class NoAppendList(list):
"""Hack to workaround memory issues with large tar files"""
def append(self, obj):
pass
@@ -149,8 +155,7 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
if relative_to is None:
destination = output_dir / PurePosixPath(tarinfo.name)
else:
- destination = output_dir / PurePosixPath(tarinfo.name).relative_to(
- relative_to)
+ destination = output_dir / PurePosixPath(tarinfo.name).relative_to(relative_to)
if tarinfo.issym() and not symlink_supported:
# In this situation, TarFile.makelink() will try to create a copy of the
# target. But this fails because TarFile.members is empty
@@ -159,8 +164,8 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
continue
if tarinfo.islnk():
# Derived from TarFile.extract()
- new_target = output_dir / PurePosixPath(tarinfo.linkname).relative_to(
- relative_to)
+ new_target = output_dir / PurePosixPath(
+ tarinfo.linkname).relative_to(relative_to)
tarinfo._link_target = new_target.as_posix() # pylint: disable=protected-access
if destination.is_symlink():
destination.unlink()
@@ -169,8 +174,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
get_logger().exception('Exception thrown for tar member: %s', tarinfo.name)
raise BuildkitAbort()
- def extract_tar_file(archive_path, output_dir, relative_to, #pylint: disable=too-many-arguments
- extractors=None):
+ def extract_tar_file(
+ archive_path,
+ output_dir,
+ relative_to, #pylint: disable=too-many-arguments
+ extractors=None):
"""
Extract regular or compressed tar archive into the output directory.
@@ -208,8 +217,12 @@ def extract_tar_file(archive_path, output_dir, relative_to, #pylint: disable=too
# Fallback to Python-based extractor on all platforms
_extract_tar_with_python(archive_path, output_dir, relative_to)
- def extract_with_7z(archive_path, output_dir, relative_to, #pylint: disable=too-many-arguments
- extractors=None):
+ def extract_with_7z(
+ archive_path,
+ output_dir,
+ relative_to, #pylint: disable=too-many-arguments
+ extractors=None):
"""
Extract archives with 7-zip into the output directory.
Only supports archives with one layer of unpacking, so compressed tar archives don't work.
@@ -237,8 +250,8 @@ def extract_with_7z(archive_path, output_dir, relative_to, #pylint: disable=too-
sevenzip_bin = _find_extractor_by_cmd(sevenzip_cmd)
if not relative_to is None and (output_dir / relative_to).exists():
- get_logger().error(
- 'Temporary unpacking directory already exists: %s', output_dir / relative_to)
+ get_logger().error('Temporary unpacking directory already exists: %s',
+ output_dir / relative_to)
raise BuildkitAbort()
cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', '-o{}'.format(str(output_dir)))
get_logger().debug('7z command line: %s', ' '.join(cmd))


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for reading and copying patches"""
import shutil
@@ -15,6 +14,7 @@ from .common import ENCODING, get_logger, ensure_empty_dir
# Default patches/ directory is next to buildkit
_DEFAULT_PATCH_DIR = Path(__file__).absolute().parent.parent / 'patches'
def patch_paths_by_bundle(config_bundle, patch_dir=_DEFAULT_PATCH_DIR):
"""
Returns an iterator of pathlib.Path to patch files in the proper order
@@ -29,6 +29,7 @@ def patch_paths_by_bundle(config_bundle, patch_dir=_DEFAULT_PATCH_DIR):
for relative_path in config_bundle.patch_order:
yield patch_dir / relative_path
def export_patches(config_bundle, path, series=Path('series'), patch_dir=_DEFAULT_PATCH_DIR):
"""
Writes patches and a series file to the directory specified by path.
@@ -53,6 +54,7 @@ def export_patches(config_bundle, path, series=Path('series'), patch_dir=_DEFAUL
with (path / series).open('w', encoding=ENCODING) as file_obj:
file_obj.write(str(config_bundle.patch_order))
def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None):
"""
Applies or reverses a list of patches
@@ -68,8 +70,7 @@ def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None
"""
patch_paths = list(patch_path_iter)
if patch_bin_path is None:
- windows_patch_bin_path = (tree_path /
- 'third_party' / 'git' / 'usr' / 'bin' / 'patch.exe')
+ windows_patch_bin_path = (tree_path / 'third_party' / 'git' / 'usr' / 'bin' / 'patch.exe')
patch_bin_path = Path(shutil.which('patch') or windows_patch_bin_path)
if not patch_bin_path.exists():
raise ValueError('Could not find the patch binary')
@@ -79,15 +80,16 @@ def apply_patches(patch_path_iter, tree_path, reverse=False, patch_bin_path=None
logger = get_logger()
for patch_path, patch_num in zip(patch_paths, range(1, len(patch_paths) + 1)):
cmd = [
- str(patch_bin_path), '-p1', '--ignore-whitespace', '-i', str(patch_path),
- '-d', str(tree_path), '--no-backup-if-mismatch']
+ str(patch_bin_path), '-p1', '--ignore-whitespace', '-i',
+ str(patch_path), '-d',
+ str(tree_path), '--no-backup-if-mismatch'
+ ]
if reverse:
cmd.append('--reverse')
log_word = 'Reversing'
else:
cmd.append('--forward')
log_word = 'Applying'
- logger.info(
- '* %s %s (%s/%s)', log_word, patch_path.name, patch_num, len(patch_paths))
+ logger.info('* %s %s (%s/%s)', log_word, patch_path.name, patch_num, len(patch_paths))
logger.debug(' '.join(cmd))
subprocess.run(cmd, check=True)


@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates updating_patch_order.list in the buildspace for updating patches"""
import argparse
@@ -16,18 +15,25 @@ from buildkit.common import ENCODING
from buildkit.cli import NewBaseBundleAction
sys.path.pop(0)
def main(arg_list=None):
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('base_bundle', action=NewBaseBundleAction,
- help='The base bundle to generate a patch order from')
- parser.add_argument('--output', metavar='PATH', type=Path,
- default='buildspace/updating_patch_order.list',
- help='The patch order file to write')
+ parser.add_argument(
+ 'base_bundle',
+ action=NewBaseBundleAction,
+ help='The base bundle to generate a patch order from')
+ parser.add_argument(
+ '--output',
+ metavar='PATH',
+ type=Path,
+ default='buildspace/updating_patch_order.list',
+ help='The patch order file to write')
args = parser.parse_args(args=arg_list)
with args.output.open('w', encoding=ENCODING) as file_obj:
file_obj.writelines('%s\n' % x for x in args.base_bundle.patches)
if __name__ == "__main__":
main()


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run Pylint over buildkit"""
import argparse
@@ -14,18 +13,18 @@ sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
import pylint_devutils
sys.path.pop(0)
def main():
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over buildkit')
+ parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument(
- '--hide-fixme', action='store_true',
- help='Hide "fixme" Pylint warnings.')
- parser.add_argument(
- '--show-locally-disabled', action='store_true',
+ '--show-locally-disabled',
+ action='store_true',
help='Show "locally-disabled" Pylint warnings.')
args = parser.parse_args()
- disable = list()
+ disable = ['bad-continuation']
if args.hide_fixme:
disable.append('fixme')
@@ -46,5 +45,6 @@ def main():
exit(1)
exit(0)
if __name__ == '__main__':
main()


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run Pylint over any module"""
import argparse
@@ -13,6 +12,7 @@ from pathlib import Path
from pylint import epylint as lint
def run_pylint(modulepath, pylint_options):
"""Runs Pylint. Returns a boolean indicating success"""
pylint_stats = Path('/run/user/{}/pylint_stats'.format(os.getuid()))
@@ -34,19 +34,17 @@ def run_pylint(modulepath, pylint_options):
return False
return True
def main():
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description='Run Pylint over an arbitrary module')
+ parser.add_argument('--hide-fixme', action='store_true', help='Hide "fixme" Pylint warnings.')
parser.add_argument(
- '--hide-fixme', action='store_true',
- help='Hide "fixme" Pylint warnings.')
- parser.add_argument(
- '--show-locally-disabled', action='store_true',
+ '--show-locally-disabled',
+ action='store_true',
help='Show "locally-disabled" Pylint warnings.')
- parser.add_argument(
- 'modulepath', type=Path,
- help='Path to the module to check')
+ parser.add_argument('modulepath', type=Path, help='Path to the module to check')
args = parser.parse_args()
if not args.modulepath.exists():
@@ -55,6 +53,7 @@ def main():
disables = [
'wrong-import-position',
+ 'bad-continuation',
]
if args.hide_fixme:
@@ -71,5 +70,6 @@ def main():
exit(1)
exit(0)
if __name__ == '__main__':
main()


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Update binary pruning and domain substitution lists automatically.
@@ -19,17 +18,15 @@ from pathlib import Path, PurePosixPath
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
from buildkit.cli import get_basebundle_verbosely
- from buildkit.common import (
- BUILDSPACE_DOWNLOADS, BUILDSPACE_TREE, ENCODING, BuildkitAbort, get_logger, dir_empty)
+ from buildkit.common import (BUILDSPACE_DOWNLOADS, BUILDSPACE_TREE, ENCODING, BuildkitAbort,
+ get_logger, dir_empty)
from buildkit.domain_substitution import TREE_ENCODINGS
from buildkit import source_retrieval
sys.path.pop(0)
# NOTE: Include patterns have precedence over exclude patterns
# pathlib.Path.match() paths to include in binary pruning
- PRUNING_INCLUDE_PATTERNS = [
- 'components/domain_reliability/baked_in_configs/*'
- ]
+ PRUNING_INCLUDE_PATTERNS = ['components/domain_reliability/baked_in_configs/*']
# pathlib.Path.match() paths to exclude from binary pruning
PRUNING_EXCLUDE_PATTERNS = [
@@ -72,43 +69,19 @@ PRUNING_EXCLUDE_PATTERNS = [
# NOTE: Domain substitution path prefix exclusion has precedence over inclusion patterns
# Paths to exclude by prefixes of the POSIX representation for domain substitution
- DOMAIN_EXCLUDE_PREFIXES = [
- 'components/test/',
- 'net/http/transport_security_state_static.json'
- ]
+ DOMAIN_EXCLUDE_PREFIXES = ['components/test/', 'net/http/transport_security_state_static.json']
# pathlib.Path.match() patterns to include in domain substitution
DOMAIN_INCLUDE_PATTERNS = [
- '*.h',
- '*.hh',
- '*.hpp',
- '*.hxx',
- '*.cc',
- '*.cpp',
- '*.cxx',
- '*.c',
- '*.h',
- '*.json',
- '*.js',
- '*.html',
- '*.htm',
- '*.css',
- '*.py*',
- '*.grd',
- '*.sql',
- '*.idl',
- '*.mk',
- '*.gyp*',
- 'makefile',
- '*.txt',
- '*.xml',
- '*.mm',
- '*.jinja*'
+ '*.h', '*.hh', '*.hpp', '*.hxx', '*.cc', '*.cpp', '*.cxx', '*.c', '*.h', '*.json', '*.js',
+ '*.html', '*.htm', '*.css', '*.py*', '*.grd', '*.sql', '*.idl', '*.mk', '*.gyp*', 'makefile',
+ '*.txt', '*.xml', '*.mm', '*.jinja*'
]
# Binary-detection constant
_TEXTCHARS = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})
def _is_binary(bytes_data):
"""
Returns True if the data seems to be binary data (i.e. not human readable); False otherwise
@@ -116,6 +89,7 @@ def _is_binary(bytes_data):
# From: https://stackoverflow.com/a/7392391
return bool(bytes_data.translate(None, _TEXTCHARS))
def should_prune(path, relative_path):
"""
Returns True if a path should be pruned from the buildspace tree; False otherwise
@@ -141,6 +115,7 @@ def should_prune(path, relative_path):
# Passed all filtering; do not prune
return False
def _check_regex_match(file_path, search_regex):
"""
Returns True if a regex pattern matches a file; False otherwise
@@ -161,6 +136,7 @@ def _check_regex_match(file_path, search_regex):
return True
return False
def should_domain_substitute(path, relative_path, search_regex):
"""
Returns True if a path should be domain substituted in the buildspace tree; False otherwise
@@ -178,6 +154,7 @@ def should_domain_substitute(path, relative_path, search_regex):
return _check_regex_match(path, search_regex)
return False
def compute_lists(buildspace_tree, search_regex):
"""
Compute the binary pruning and domain substitution lists of the buildspace tree.
@@ -229,32 +206,51 @@ def compute_lists(buildspace_tree, search_regex):
raise BuildkitAbort()
return sorted(pruning_set), sorted(domain_substitution_set)
def main(args_list=None):
"""CLI entrypoint"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
- '-a', '--auto-download', action='store_true',
+ '-a',
+ '--auto-download',
+ action='store_true',
help='If specified, it will download the source code and dependencies '
- 'for the --base-bundle given. Otherwise, only an existing '
- 'buildspace tree will be used.')
+ 'for the --base-bundle given. Otherwise, only an existing '
+ 'buildspace tree will be used.')
parser.add_argument(
- '-b', '--base-bundle', metavar='NAME', type=get_basebundle_verbosely,
- default='common', help='The base bundle to use. Default: %(default)s')
+ '-b',
+ '--base-bundle',
+ metavar='NAME',
+ type=get_basebundle_verbosely,
+ default='common',
+ help='The base bundle to use. Default: %(default)s')
parser.add_argument(
- '-p', '--pruning', metavar='PATH', type=Path,
+ '-p',
+ '--pruning',
+ metavar='PATH',
+ type=Path,
default='resources/config_bundles/common/pruning.list',
help='The path to store pruning.list. Default: %(default)s')
parser.add_argument(
- '-d', '--domain-substitution', metavar='PATH', type=Path,
+ '-d',
+ '--domain-substitution',
+ metavar='PATH',
+ type=Path,
default='resources/config_bundles/common/domain_substitution.list',
help='The path to store domain_substitution.list. Default: %(default)s')
parser.add_argument(
- '--tree', metavar='PATH', type=Path, default=BUILDSPACE_TREE,
+ '--tree',
+ metavar='PATH',
+ type=Path,
+ default=BUILDSPACE_TREE,
help=('The path to the buildspace tree to create. '
'If it is not empty, the source will not be unpacked. '
'Default: %(default)s'))
parser.add_argument(
- '--downloads', metavar='PATH', type=Path, default=BUILDSPACE_DOWNLOADS,
+ '--downloads',
+ metavar='PATH',
+ type=Path,
+ default=BUILDSPACE_DOWNLOADS,
help=('The path to the buildspace downloads directory. '
'It must already exist. Default: %(default)s'))
try:
@@ -278,5 +274,6 @@ def main(args_list=None):
with args.domain_substitution.open('w', encoding=ENCODING) as file_obj:
file_obj.writelines('%s\n' % line for line in domain_substitution_list)
if __name__ == "__main__":
main()


@@ -3,7 +3,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Refreshes patches of all configs via quilt until the first patch that
requires manual modification


@@ -4,7 +4,6 @@
# Copyright (c) 2018 The ungoogled-chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Run sanity checking algorithms over the base bundles and patches.
It checks the following:
@@ -33,19 +32,16 @@ import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
- from buildkit.common import (
- CONFIG_BUNDLES_DIR, ENCODING, PATCHES_DIR, BuildkitAbort, get_logger,
- get_resources_dir)
+ from buildkit.common import (CONFIG_BUNDLES_DIR, ENCODING, PATCHES_DIR, BuildkitAbort, get_logger,
+ get_resources_dir)
from buildkit.config import BASEBUNDLEMETA_INI, BaseBundleMetaIni, ConfigBundle
from buildkit.third_party import unidiff
sys.path.pop(0)
- BaseBundleResult = collections.namedtuple(
- 'BaseBundleResult',
- ('leaves', 'gn_flags', 'patches'))
+ BaseBundleResult = collections.namedtuple('BaseBundleResult', ('leaves', 'gn_flags', 'patches'))
ExplorationJournal = collections.namedtuple(
- 'ExplorationJournal',
- ('unexplored_set', 'results', 'dependents', 'unused_patches'))
+ 'ExplorationJournal', ('unexplored_set', 'results', 'dependents', 'unused_patches'))
def _check_patches(bundle, logger):
"""
@@ -69,6 +65,7 @@ def _check_patches(bundle, logger):
warnings = False
return warnings
def _merge_disjoints(pair_iterable, current_name, logger):
"""
Merges disjoint sets with errors
@@ -93,6 +90,7 @@ def _merge_disjoints(pair_iterable, current_name, logger):
warnings = True
return warnings
def _populate_set_with_gn_flags(new_set, base_bundle, logger):
"""
Adds items into set new_set from the base bundle's GN flags
@@ -111,14 +109,14 @@ def _populate_set_with_gn_flags(new_set, base_bundle, logger):
return warnings
for current in iterator:
if current < previous:
- logger.warning(
- 'In base bundle "%s" GN flags: "%s" should be sorted before "%s"',
- base_bundle.name, current, previous)
+ logger.warning('In base bundle "%s" GN flags: "%s" should be sorted before "%s"',
+ base_bundle.name, current, previous)
warnings = True
new_set.add('%s=%s' % (current, base_bundle.gn_flags[current]))
previous = current
return warnings
def _populate_set_with_patches(new_set, unused_patches, base_bundle, logger):
"""
Adds entries to set new_set from the base bundle's patch_order if they are unique.
@@ -128,15 +126,15 @@ def _populate_set_with_patches(new_set, unused_patches, base_bundle, logger):
warnings = False
for current in base_bundle.patches:
if current in new_set:
- logger.warning(
- 'In base bundle "%s" patch_order: "%s" already appeared once',
- base_bundle.name, current)
+ logger.warning('In base bundle "%s" patch_order: "%s" already appeared once',
+ base_bundle.name, current)
warnings = True
else:
unused_patches.discard(current)
new_set.add(current)
return warnings
def _explore_base_bundle(current_name, journal, logger):
"""
Explore the base bundle given by current_name. Modifies journal
@@ -162,16 +160,12 @@ def _explore_base_bundle(current_name, journal, logger):
current_meta = BaseBundleMetaIni(current_base_bundle.path / BASEBUNDLEMETA_INI)
# Populate current base bundle's data
- current_results = BaseBundleResult(
- leaves=set(),
- gn_flags=set(),
- patches=set())
- warnings = _populate_set_with_gn_flags(
- current_results.gn_flags, current_base_bundle, logger) or warnings
- warnings = _populate_set_with_patches(
- current_results.patches, journal.unused_patches, current_base_bundle, logger) or warnings
- warnings = _check_patches(
- current_base_bundle, logger) or warnings
+ current_results = BaseBundleResult(leaves=set(), gn_flags=set(), patches=set())
+ warnings = _populate_set_with_gn_flags(current_results.gn_flags, current_base_bundle,
+ logger) or warnings
+ warnings = _populate_set_with_patches(current_results.patches, journal.unused_patches,
+ current_base_bundle, logger) or warnings
+ warnings = _check_patches(current_base_bundle, logger) or warnings
# Set an empty set just in case this node has no dependents
if current_name not in journal.dependents:
@@ -188,12 +182,10 @@ def _explore_base_bundle(current_name, journal, logger):
# Merge sets of dependencies with the current
warnings = _merge_disjoints((
- ('Patches', current_results.patches,
- journal.results[dependency_name].patches, False),
- ('GN flags', current_results.gn_flags,
- journal.results[dependency_name].gn_flags, False),
- ('Dependencies', current_results.leaves,
- journal.results[dependency_name].leaves, True),
+ ('Patches', current_results.patches, journal.results[dependency_name].patches, False),
+ ('GN flags', current_results.gn_flags, journal.results[dependency_name].gn_flags,
+ False),
+ ('Dependencies', current_results.leaves, journal.results[dependency_name].leaves, True),
), current_name, logger) or warnings
if not current_results.leaves:
# This node is a leaf node
@@ -204,6 +196,7 @@ def _explore_base_bundle(current_name, journal, logger):
return warnings
def _check_mergability(info_tuple_list, dependents, logger):
"""
Checks if entries of config files from dependents can be combined into a common dependency
@@ -222,19 +215,18 @@ def _check_mergability(info_tuple_list, dependents, logger):
# Keep only common entries between the current dependent and
# other processed dependents for the current dependency
for display_name, set_getter in info_tuple_list:
- set_dict[display_name].intersection_update(
- set_getter(dependent_name))
+ set_dict[display_name].intersection_update(set_getter(dependent_name))
# Check if there are any common entries in all dependents for the
# given dependency
for display_name, common_set in set_dict.items():
if common_set:
- logger.warning(
- 'Base bundles %s can combine %s into "%s": %s',
- dependents[dependency_name], display_name, dependency_name,
- common_set)
+ logger.warning('Base bundles %s can combine %s into "%s": %s',
+ dependents[dependency_name], display_name, dependency_name,
+ common_set)
warnings = True
return warnings
def main():
"""CLI entrypoint"""
@@ -246,23 +238,20 @@ def main():
journal = ExplorationJournal(
# base bundles not explored yet
- unexplored_set=set(map(
- lambda x: x.name,
- config_bundles_dir.iterdir())),
+ unexplored_set=set(map(lambda x: x.name, config_bundles_dir.iterdir())),
# base bundle name -> namedtuple(leaves=set(), gn_flags=set())
results=dict(),
# dependency -> set of dependents
dependents=dict(),
# patches unused by patch orders
- unused_patches=set(map(
- lambda x: str(x.relative_to(patches_dir)),
- filter(lambda x: not x.is_dir(), patches_dir.rglob('*'))))
- )
+ unused_patches=set(
+ map(lambda x: str(x.relative_to(patches_dir)),
+ filter(lambda x: not x.is_dir(), patches_dir.rglob('*')))))
try:
# Explore and validate base bundles
while journal.unexplored_set:
- warnings = _explore_base_bundle(
- next(iter(journal.unexplored_set)), journal, logger) or warnings
+ warnings = _explore_base_bundle(next(iter(journal.unexplored_set)), journal,
+ logger) or warnings
# Check for config file entries that should be merged into dependencies
warnings = _check_mergability((
('GN flags', lambda x: journal.results[x].gn_flags),
@@ -278,6 +267,7 @@ def main():
exit(1)
exit(0)
if __name__ == '__main__':
if sys.argv[1:]:
print(__doc__)

devutils/yapf_buildkit.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/bash
set -eux
python3 -m yapf --style '.style.yapf' -e '*/third_party/*' -rpi buildkit
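For reference, yapf's short options here expand to --recursive, --parallel, and --in-place (-rpi), with -e excluding the vendored third_party code from reformatting; the script assumes it is invoked from the repository root so that the relative '.style.yapf' path resolves. The equivalent long-form invocation:

python3 -m yapf --style '.style.yapf' --exclude '*/third_party/*' --recursive --parallel --in-place buildkit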

devutils/yapf_devutils.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/bash
set -eux
python3 -m yapf --style '.style.yapf' -ri $@
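Note that $@ is left unquoted here, so the shell will word-split any argument containing whitespace before it reaches yapf; the conventional safe spelling would be:

python3 -m yapf --style '.style.yapf' -ri "$@"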