mirror of https://github.com/morgan9e/helium (synced 2026-04-14 16:34:06 +09:00)
Update CI config, scripts, and docs for python 3.10
@@ -9,7 +9,7 @@ container:
 
 code_check_task:
   pip_cache:
-    folder: /usr/local/lib/python3.9/site-packages
+    folder: /usr/local/lib/python3.10/site-packages
     fingerprint_script: cat .cirrus_requirements.txt
     populate_script: pip install -r .cirrus_requirements.txt
   utils_script:
@@ -26,7 +26,7 @@ validate_config_task:
 
 validate_with_source_task:
   pip_cache:
-    folder: /usr/local/lib/python3.9/site-packages
+    folder: /usr/local/lib/python3.10/site-packages
     fingerprint_script: cat .cirrus_requirements.txt
     populate_script: pip install -r .cirrus_requirements.txt
   chromium_download_script: |
@@ -1,5 +1,5 @@
 # Dockerfile for Python 3 with xz-utils (for tar.xz unpacking)
 
-FROM python:3.9-slim-bullseye
+FROM python:3.10-slim-bookworm
 
 RUN apt update && apt install -y xz-utils patch axel curl git
@@ -1,9 +1,9 @@
-# Based on Python package versions in Debian bullseye
-# https://packages.debian.org/bullseye/python/
-astroid==2.5.1 # via pylint
-pylint==2.7.2
-pytest-cov==2.10.1
-pytest==6.0.2
-httplib2==0.18.1
-requests==2.25.1
-yapf==0.30.0
+# Based on Python package versions in Debian bookworm
+# https://packages.debian.org/bookworm/python/
+astroid==2.14.2 # via pylint
+pylint==2.16.2
+pytest-cov==4.0.0
+pytest==7.2.1
+httplib2==0.20.4
+requests==2.28.1
+yapf==0.32.0
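The pins above track the Python package versions shipped in Debian bookworm. As a quick sketch (hypothetical helper, not part of this commit), a local environment can be checked against the pin file like so:

```python
# Compare installed package versions against the 'pkg==version' pins in
# .cirrus_requirements.txt; inline '# via ...' comments are ignored.
from importlib.metadata import version
from pathlib import Path

for line in Path('.cirrus_requirements.txt').read_text(encoding='UTF-8').splitlines():
    requirement = line.split('#', 1)[0].strip()
    if not requirement:
        continue
    name, _, pinned = requirement.partition('==')
    installed = version(name)
    assert installed == pinned, f'{name}: installed {installed}, pinned {pinned}'
```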
@@ -21,6 +21,7 @@ from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from downloads import DownloadInfo, schema
 
 sys.path.pop(0)
 
+
@@ -27,8 +27,7 @@ def main():
             Path(input_name).read_text(encoding='UTF-8').splitlines()))
         for file_name in file_iter:
             if not Path(args.root_dir, file_name).exists():
-                print('ERROR: Path "{}" from file "{}" does not exist.'.format(
-                    file_name, input_name),
+                print(f'ERROR: Path "{file_name}" from file "{input_name}" does not exist.',
                       file=sys.stderr)
                 sys.exit(1)
 
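Most of the script hunks below repeat one mechanical change: `str.format()` and `%`-interpolation become f-strings (PEP 498). A standalone illustration of the equivalence, using made-up example values:

```python
# Both styles produce identical output; f-strings inline the values at the
# point of use and are syntax-checked at compile time.
file_name, input_name = 'chrome/BUILD.gn', 'pruning.list'  # example values
old_style = 'ERROR: Path "{}" from file "{}" does not exist.'.format(file_name, input_name)
new_style = f'ERROR: Path "{file_name}" from file "{input_name}" does not exist.'
assert old_style == new_style
```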
@@ -21,6 +21,7 @@ from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from _common import ENCODING, get_logger
 
 sys.path.pop(0)
 
+
@@ -24,6 +24,7 @@ from third_party import unidiff
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from _common import ENCODING, get_logger, parse_series # pylint: disable=wrong-import-order
 
 sys.path.pop(0)
+
 # File suffixes to ignore for checking unused patches
@@ -23,7 +23,6 @@ def main():
 
     disables = [
         'wrong-import-position',
-        'bad-continuation',
         'duplicate-code',
     ]
 
@@ -33,7 +32,7 @@ def main():
         disables.append('locally-disabled')
 
     pylint_options = [
-        '--disable={}'.format(','.join(disables)),
+        f"--disable={','.join(disables)}",
        '--jobs=4',
         '--score=n',
         '--persistent=n',
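Dropping `'bad-continuation'` here is not just cleanup: the check was removed from pylint in the 2.6 series, so the pylint 2.16.2 pinned above no longer recognizes the symbol. A hedged sketch of how the option could be kept version-dependent if older pylint still had to be supported:

```python
# Hypothetical guard: only request 'bad-continuation' from pylint versions
# that still define it (it was removed in pylint 2.6).
import pylint

disables = ['wrong-import-position', 'duplicate-code']
major, minor = (int(part) for part in pylint.__version__.split('.')[:2])
if (major, minor) < (2, 6):
    disables.append('bad-continuation')
```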
@@ -18,6 +18,7 @@ class ChangeDir:
     """
     Changes directory to path in with statement
     """
+
     def __init__(self, path):
         self._path = path
         self._orig_path = os.getcwd()
@@ -31,12 +32,12 @@ class ChangeDir:
 
 def run_pylint(module_path, pylint_options, ignore_prefixes=tuple()):
     """Runs Pylint. Returns a boolean indicating success"""
-    pylint_stats = Path('/run/user/{}/pylint_stats'.format(os.getuid()))
+    pylint_stats = Path(f'/run/user/{os.getuid()}/pylint_stats')
     if not pylint_stats.parent.is_dir(): #pylint: disable=no-member
         pylint_stats = Path('/run/shm/pylint_stats')
     os.environ['PYLINTHOME'] = str(pylint_stats)
 
-    input_paths = list()
+    input_paths = []
     if not module_path.exists():
         print('ERROR: Cannot find', module_path)
         sys.exit(1)
@@ -75,12 +76,11 @@ def main():
     args = parser.parse_args()
 
     if not args.module_path.exists():
-        print('ERROR: Module path "{}" does not exist'.format(args.module_path))
+        print(f'ERROR: Module path "{args.module_path}" does not exist')
         sys.exit(1)
 
     disables = [
         'wrong-import-position',
-        'bad-continuation',
     ]
 
     if args.hide_fixme:
@@ -89,7 +89,7 @@ def main():
         disables.append('locally-disabled')
 
     pylint_options = [
-        '--disable={}'.format(','.join(disables)),
+        f"--disable={','.join(disables)}",
         '--jobs=4',
         '--score=n',
         '--persistent=n',
@@ -21,21 +21,18 @@ def main():
                         help='Show "locally-disabled" Pylint warnings.')
     args = parser.parse_args()
 
-    disable = ['bad-continuation']
-
-    if args.hide_fixme:
-        disable.append('fixme')
-    if not args.show_locally_disabled:
-        disable.append('locally-disabled')
-
     pylint_options = [
-        '--disable={}'.format(','.join(disable)),
         '--jobs=4',
         '--max-args=7',
         '--score=n',
         '--persistent=n',
     ]
 
+    if args.hide_fixme:
+        pylint_options.append('--disable=fixme')
+    if not args.show_locally_disabled:
+        pylint_options.append('--disable=locally-disabled')
+
     ignore_prefixes = [
         ('third_party', ),
         ('tests', ),
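The hunk above replaces the separate `disable` list with direct appends to `pylint_options`, one `--disable=...` argument per condition. This works because pylint accumulates repeated `--disable` options; a minimal sketch under that assumption, with a hypothetical target path:

```python
# Repeated --disable flags accumulate, so appending one per condition is
# equivalent to joining them into a single comma-separated --disable.
from pylint.lint import Run

pylint_options = ['--jobs=4', '--max-args=7', '--score=n', '--persistent=n']
pylint_options.append('--disable=fixme')
pylint_options.append('--disable=locally-disabled')
Run(pylint_options + ['utils'], exit=False)  # 'utils' is an assumed target
```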
@@ -11,11 +11,13 @@ import sys
 from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent / 'utils'))
-from _common import get_logger, set_logging_level
+from _common import ENCODING, get_logger, set_logging_level
 
 sys.path.pop(0)
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
 from check_patch_files import check_series_duplicates
 
 sys.path.pop(0)
+
+
@@ -33,7 +35,7 @@ def test_check_series_duplicates():
         'a.patch',
         'b.patch',
         'c.patch',
-    ]))
+    ]), encoding=ENCODING)
     assert not check_series_duplicates(patches_dir)
 
     get_logger().info('Check duplicates')
@@ -42,7 +44,8 @@ def test_check_series_duplicates():
         'b.patch',
         'c.patch',
         'a.patch',
-    ]))
+    ]),
+                           encoding=ENCODING)
     assert check_series_duplicates(patches_dir)
 
 
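The `encoding=ENCODING` arguments threaded through these tests (and the scripts below) address pylint's `unspecified-encoding` (W1514) check: `Path.read_text()` and `write_text()` default to the locale's preferred encoding, which varies between systems and CI images. A self-contained sketch; the assumption here is that `ENCODING` mirrors the UTF-8 constant in `utils/_common.py`:

```python
# Explicit encodings make file round-trips independent of the locale.
from pathlib import Path

ENCODING = 'UTF-8'  # assumption: matches the constant in utils/_common.py
series = Path('series')
series.write_text('\n'.join(['a.patch', 'b.patch']), encoding=ENCODING)
assert series.read_text(encoding=ENCODING).splitlines() == ['a.patch', 'b.patch']
```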
@@ -11,11 +11,13 @@ import sys
 from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent / 'utils'))
-from _common import get_logger, set_logging_level
+from _common import ENCODING, get_logger, set_logging_level
 
 sys.path.pop(0)
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
 import validate_patches
 
 sys.path.pop(0)
+
+
@@ -30,8 +32,8 @@ def test_test_patches():
 
     def _run_test_patches(patch_content):
         with tempfile.TemporaryDirectory() as tmpdirname:
-            Path(tmpdirname, 'foobar.txt').write_text(orig_file_content)
-            Path(tmpdirname, 'test.patch').write_text(patch_content)
+            Path(tmpdirname, 'foobar.txt').write_text(orig_file_content, encoding=ENCODING)
+            Path(tmpdirname, 'test.patch').write_text(patch_content, encoding=ENCODING)
             _, patch_cache = validate_patches._load_all_patches(series_iter, Path(tmpdirname))
             required_files = validate_patches._get_required_files(patch_cache)
             files_under_test = validate_patches._retrieve_local_files(required_files,
@@ -23,6 +23,7 @@ sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from _common import get_logger
 from domain_substitution import DomainRegexList, TREE_ENCODINGS
 from prune_binaries import CONTINGENT_PATHS
 
 sys.path.pop(0)
+
 # Encoding for output files
@@ -383,9 +384,9 @@ def main(args_list=None):
         args.tree,
         DomainRegexList(args.domain_regex).search_regex, args.processes)
     with args.pruning.open('w', encoding=_ENCODING) as file_obj:
-        file_obj.writelines('%s\n' % line for line in pruning_set)
+        file_obj.writelines(f'{line}\n' for line in pruning_set)
     with args.domain_substitution.open('w', encoding=_ENCODING) as file_obj:
-        file_obj.writelines('%s\n' % line for line in domain_substitution_set)
+        file_obj.writelines(f'{line}\n' for line in domain_substitution_set)
     if unused_patterns.log_unused(args.error_unused) and args.error_unused:
         get_logger().error('Please update or remove unused patterns and/or prefixes. '
                            'The lists have still been updated with the remaining valid entries.')
@@ -17,6 +17,7 @@ from pathlib import Path
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from _common import ENCODING, get_logger
 from patches import merge_patches
 
 sys.path.pop(0)
+
 _SERIES = 'series'
@@ -107,14 +108,14 @@ def unmerge_platform_patches(platform_patches_dir):
         return False
     orig_series = (platform_patches_dir / _SERIES_ORIG).read_text(encoding=ENCODING).splitlines()
     # patch path -> list of lines after patch path and before next patch path
-    path_comments = dict()
+    path_comments = {}
     # patch path -> inline comment for patch
-    path_inline_comments = dict()
+    path_inline_comments = {}
     previous_path = None
     for partial_path in orig_series:
         if not partial_path or partial_path.startswith('#'):
             if partial_path not in path_comments:
-                path_comments[previous_path] = list()
+                path_comments[previous_path] = []
             path_comments[previous_path].append(partial_path)
         else:
             path_parts = partial_path.split(' #', maxsplit=1)
@@ -23,12 +23,14 @@ from pathlib import Path
 sys.path.insert(0, str(Path(__file__).resolve().parent / 'third_party'))
 import unidiff
 from unidiff.constants import LINE_TYPE_EMPTY, LINE_TYPE_NO_NEWLINE
+
 sys.path.pop(0)
 
 sys.path.insert(0, str(Path(__file__).resolve().parent.parent / 'utils'))
 from domain_substitution import TREE_ENCODINGS
 from _common import ENCODING, get_logger, get_chromium_version, parse_series, add_common_params
 from patches import dry_run_check
+
 sys.path.pop(0)
 
 try:
@@ -38,6 +40,7 @@ try:
 
 class _VerboseRetry(urllib3.util.Retry):
     """A more verbose version of HTTP Adatper about retries"""
+
     def sleep_for_retry(self, response=None):
         """Sleeps for Retry-After, and logs the sleep time"""
         if response:
@@ -100,16 +103,16 @@ class _DepsNodeVisitor(ast.NodeVisitor):
     def visit_Call(self, node): #pylint: disable=invalid-name
         """Override Call syntax handling"""
         if node.func.id not in self._allowed_callables:
-            raise _UnexpectedSyntaxError('Unexpected call of "%s" at line %s, column %s' %
-                                         (node.func.id, node.lineno, node.col_offset))
+            raise _UnexpectedSyntaxError(f'Unexpected call of "{node.func.id}" '
+                                         f'at line {node.lineno}, column {node.col_offset}')
 
     def generic_visit(self, node):
         for ast_type in self._valid_syntax_types:
             if isinstance(node, ast_type):
                 super().generic_visit(node)
                 return
-        raise _UnexpectedSyntaxError('Unexpected {} at line {}, column {}'.format(
-            type(node).__name__, node.lineno, node.col_offset))
+        raise _UnexpectedSyntaxError(f'Unexpected {type(node).__name__} '
+                                     f'at line {node.lineno}, column {node.col_offset}')
 
 
 def _validate_deps(deps_text):
@@ -124,6 +127,7 @@ def _validate_deps(deps_text):
 
 def _deps_var(deps_globals):
     """Return a function that implements DEPS's Var() function"""
+
     def _var_impl(var_name):
         """Implementation of Var() in DEPS"""
         return deps_globals['vars'][var_name]
@@ -145,8 +149,8 @@ def _download_googlesource_file(download_session, repo_url, version, relative_pa
     googlesource.com repo as a string.
     """
     if 'googlesource.com' not in repo_url:
-        raise ValueError('Repository URL is not a googlesource.com URL: {}'.format(repo_url))
-    full_url = repo_url + '/+/{}/{}?format=TEXT'.format(version, str(relative_path))
+        raise ValueError(f'Repository URL is not a googlesource.com URL: {repo_url}')
+    full_url = repo_url + f'/+/{version}/{str(relative_path)}?format=TEXT'
     get_logger().debug('Downloading: %s', full_url)
     response = download_session.get(full_url)
     if response.status_code == 404:
@@ -172,13 +176,13 @@ def _get_dep_value_url(deps_globals, dep_value):
         # Probably a Python format string
         url = url.format(**deps_globals['vars'])
     if url.count('@') != 1:
-        raise _PatchValidationError('Invalid number of @ symbols in URL: {}'.format(url))
+        raise _PatchValidationError(f'Invalid number of @ symbols in URL: {url}')
     return url
 
 
 def _process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_relative_paths):
     """Helper for _get_child_deps_tree"""
-    for dep_path_str, dep_value in deps_globals.get('deps', dict()).items():
+    for dep_path_str, dep_value in deps_globals.get('deps', {}).items():
         url = _get_dep_value_url(deps_globals, dep_value)
         if url is None:
             continue
@@ -200,7 +204,7 @@ def _process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_re
             grandchild_deps_tree = recursedeps_item_depsfile
         if grandchild_deps_tree is None:
             # This dep is not recursive; i.e. it is fully loaded
-            grandchild_deps_tree = dict()
+            grandchild_deps_tree = {}
         child_deps_tree[dep_path] = (*url.split('@'), grandchild_deps_tree)
 
 
@@ -211,7 +215,7 @@ def _get_child_deps_tree(download_session, current_deps_tree, child_path, deps_u
         # Load unloaded DEPS
         deps_globals = _parse_deps(
             _download_googlesource_file(download_session, repo_url, version, child_deps_tree))
-        child_deps_tree = dict()
+        child_deps_tree = {}
         current_deps_tree[child_path] = (repo_url, version, child_deps_tree)
         deps_use_relative_paths = deps_globals.get('use_relative_paths', False)
         _process_deps_entries(deps_globals, child_deps_tree, child_path, deps_use_relative_paths)
@@ -221,9 +225,8 @@ def _get_child_deps_tree(download_session, current_deps_tree, child_path, deps_u
 def _get_last_chromium_modification():
     """Returns the last modification date of the chromium-browser-official tar file"""
     with _get_requests_session() as session:
-        response = session.head(
-            'https://storage.googleapis.com/chromium-browser-official/chromium-{}.tar.xz'.format(
-                get_chromium_version()))
+        response = session.head('https://storage.googleapis.com/chromium-browser-official/'
+                                f'chromium-{get_chromium_version()}.tar.xz')
         response.raise_for_status()
         return email.utils.parsedate_to_datetime(response.headers['Last-Modified'])
 
@@ -235,7 +238,7 @@ def _get_gitiles_git_log_date(log_entry):
 
 def _get_gitiles_commit_before_date(repo_url, target_branch, target_datetime):
     """Returns the hexadecimal hash of the closest commit before target_datetime"""
-    json_log_url = '{repo}/+log/{branch}?format=JSON'.format(repo=repo_url, branch=target_branch)
+    json_log_url = f'{repo_url}/+log/{target_branch}?format=JSON'
     with _get_requests_session() as session:
         response = session.get(json_log_url)
         response.raise_for_status()
@@ -410,7 +413,7 @@ def _retrieve_remote_files(file_iter):
     Returns a dict of relative UNIX path strings to a list of lines in the file as strings
     """
 
-    files = dict()
+    files = {}
 
     root_deps_tree = _initialize_deps_tree()
 
@@ -459,7 +462,7 @@ def _retrieve_local_files(file_iter, source_dir):
 
     Returns a dict of relative UNIX path strings to a list of lines in the file as strings
     """
-    files = dict()
+    files = {}
     for file_path in file_iter:
         try:
             raw_content = (source_dir / file_path).read_bytes()
@@ -473,7 +476,7 @@ def _retrieve_local_files(file_iter, source_dir):
         except UnicodeDecodeError:
             continue
         if not content:
-            raise UnicodeDecodeError('Unable to decode with any encoding: %s' % file_path)
+            raise UnicodeDecodeError(f'Unable to decode with any encoding: {file_path}')
         files[file_path] = content.split('\n')
     if not files:
         get_logger().error('All files used by patches are missing!')
@@ -488,7 +491,7 @@ def _modify_file_lines(patched_file, file_lines):
     for hunk in patched_file:
         # Validate hunk will match
         if not hunk.is_valid():
-            raise _PatchValidationError('Hunk is not valid: {}'.format(repr(hunk)))
+            raise _PatchValidationError(f'Hunk is not valid: {repr(hunk)}')
         line_cursor = hunk.target_start - 1
         for line in hunk:
             normalized_line = line.value.rstrip('\n')
@@ -497,18 +500,16 @@ def _modify_file_lines(patched_file, file_lines):
                 line_cursor += 1
             elif line.is_removed:
                 if normalized_line != file_lines[line_cursor]:
-                    raise _PatchValidationError(
-                        "Line '{}' does not match removal line '{}' from patch".format(
-                            file_lines[line_cursor], normalized_line))
+                    raise _PatchValidationError(f"Line '{file_lines[line_cursor]}' does not match "
+                                                f"removal line '{normalized_line}' from patch")
                 del file_lines[line_cursor]
             elif line.is_context:
                 if not normalized_line and line_cursor == len(file_lines):
                     # We reached the end of the file
                     break
                 if normalized_line != file_lines[line_cursor]:
-                    raise _PatchValidationError(
-                        "Line '{}' does not match context line '{}' from patch".format(
-                            file_lines[line_cursor], normalized_line))
+                    raise _PatchValidationError(f"Line '{file_lines[line_cursor]}' does not match "
+                                                f"context line '{normalized_line}' from patch")
                 line_cursor += 1
             else:
                 assert line.line_type in (LINE_TYPE_EMPTY, LINE_TYPE_NO_NEWLINE)
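Where a formatted message no longer fits on one line, the new code splits it using implicit string concatenation. Both fragments need the `f` prefix, otherwise the placeholders in the unprefixed part are emitted literally:

```python
# Adjacent string literals are concatenated at compile time; each fragment
# is only formatted if it carries the f prefix.
current, normalized_line = 'old line', 'new line'  # example values
message = (f"Line '{current}' does not match "
           f"removal line '{normalized_line}' from patch")
assert message == "Line 'old line' does not match removal line 'new line' from patch"
```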
@@ -592,7 +593,7 @@ def _load_all_patches(series_iter, patches_dir):
     - dict of relative UNIX path strings to unidiff.PatchSet
     """
     had_failure = False
-    unidiff_dict = dict()
+    unidiff_dict = {}
     for relative_path in series_iter:
         if relative_path in unidiff_dict:
             continue
@@ -682,12 +683,12 @@ def main():
         if args.cache_remote.parent.exists():
             args.cache_remote.mkdir()
         else:
-            parser.error('Parent of cache path {} does not exist'.format(args.cache_remote))
+            parser.error(f'Parent of cache path {args.cache_remote} does not exist')
 
     if not args.series.is_file():
-        parser.error('--series path is not a file or not found: {}'.format(args.series))
+        parser.error(f'--series path is not a file or not found: {args.series}')
     if not args.patches.is_dir():
-        parser.error('--patches path is not a directory or not found: {}'.format(args.patches))
+        parser.error(f'--patches path is not a directory or not found: {args.patches}')
 
     series_iterable = tuple(parse_series(args.series))
     had_failure, patch_cache = _load_all_patches(series_iterable, args.patches)
@@ -24,7 +24,7 @@ For new flags, first add a constant to `third_party/ungoogled/ungoogled_switches
 
 ## Workflow of updating to a new Chromium version
 
-Tested on Debian 10 (buster). Exact instructions should work on any other Linux or macOS system with the proper dependencies.
+Tested on Debian 12 (bookworm). Exact instructions should work on any other Linux or macOS system with the proper dependencies.
 
 To gain a deeper understanding of this updating process, have a read through [docs/design.md](design.md).
 
@@ -33,7 +33,7 @@ To gain a deeper understanding of this updating process, have a read through [do
 * [`quilt`](http://savannah.nongnu.org/projects/quilt)
     * This is available in most (if not all) Linux distributions, and also Homebrew on macOS.
     * This utility facilitates most of the updating process, so it is important to learn how to use this. The manpage for quilt (as of early 2017) lacks an example of a workflow. There are multiple guides online, but [this guide from Debian](https://wiki.debian.org/UsingQuilt) and [the referenced guide on that page](https://raphaelhertzog.com/2012/08/08/how-to-use-quilt-to-manage-patches-in-debian-packages/) are the ones referenced in developing the current workflow.
-* Python 3.9 or newer
+* Python 3.10 or newer
     * `httplib2` and `six` are also required if you wish to utilize a source clone instead of the source tarball.
 
 ### Downloading the source code
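With the documented floor now at Python 3.10, a minimal guard of the kind a script entry point could use (a sketch, not something this commit adds):

```python
# Fail fast with a clear message instead of a SyntaxError or ImportError
# somewhere deep inside the utilities.
import sys

if sys.version_info < (3, 10):
    sys.exit(f'Python 3.10+ is required, found {sys.version.split()[0]}')
```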
@@ -36,6 +36,7 @@ class ExtractorEnum: #pylint: disable=too-few-public-methods
 
 class SetLogLevel(argparse.Action): #pylint: disable=too-few-public-methods
     """Sets logging level based on command line arguments it receives"""
+
     def __init__(self, option_strings, dest, nargs=None, **kwargs):
         super().__init__(option_strings, dest, nargs=nargs, **kwargs)
 
@@ -80,7 +80,7 @@ def _process_relative_to(unpack_root, relative_to):
     if not relative_root.is_dir():
         get_logger().error('Could not find relative_to directory in extracted files: %s',
                            relative_to)
-        raise Exception()
+        raise FileNotFoundError()
     for src_path in relative_root.iterdir():
         dest_path = unpack_root / src_path.name
         src_path.rename(dest_path)
@@ -92,20 +92,20 @@ def _extract_tar_with_7z(binary, archive_path, output_dir, relative_to):
     if not relative_to is None and (output_dir / relative_to).exists():
         get_logger().error('Temporary unpacking directory already exists: %s',
                            output_dir / relative_to)
-        raise Exception()
+        raise FileExistsError()
     cmd1 = (binary, 'x', str(archive_path), '-so')
-    cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', '-o{}'.format(str(output_dir)))
+    cmd2 = (binary, 'x', '-si', '-aoa', '-ttar', f'-o{str(output_dir)}')
     get_logger().debug('7z command line: %s | %s', ' '.join(cmd1), ' '.join(cmd2))
 
-    proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
-    proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE)
+    proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE) #pylint: disable=consider-using-with
+    proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout, stdout=subprocess.PIPE) #pylint: disable=consider-using-with
     proc1.stdout.close()
     (stdout_data, stderr_data) = proc2.communicate()
     if proc2.returncode != 0:
         get_logger().error('7z commands returned non-zero status: %s', proc2.returncode)
         get_logger().debug('stdout: %s', stdout_data)
         get_logger().debug('stderr: %s', stderr_data)
-        raise Exception()
+        raise ChildProcessError()
 
     _process_relative_to(output_dir, relative_to)
 
@@ -118,7 +118,7 @@ def _extract_tar_with_tar(binary, archive_path, output_dir, relative_to):
     result = subprocess.run(cmd, check=False)
     if result.returncode != 0:
         get_logger().error('tar command returned %s', result.returncode)
-        raise Exception()
+        raise ChildProcessError()
 
     # for gnu tar, the --transform option could be used. but to keep compatibility with
     # bsdtar on macos, we just do this ourselves
@@ -133,7 +133,7 @@ def _extract_tar_with_winrar(binary, archive_path, output_dir, relative_to):
     result = subprocess.run(cmd, check=False)
     if result.returncode != 0:
         get_logger().error('WinRAR command returned %s', result.returncode)
-        raise Exception()
+        raise ChildProcessError()
 
     _process_relative_to(output_dir, relative_to)
 
@@ -143,10 +143,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
 
     class NoAppendList(list):
         """Hack to workaround memory issues with large tar files"""
+
         def append(self, obj):
             pass
 
     # Simple hack to check if symlinks are supported
+    symlink_supported = False
     try:
         os.symlink('', '')
     except FileNotFoundError:
@@ -155,13 +157,12 @@ def _extract_tar_with_python(archive_path, output_dir, relative_to):
     except OSError:
         # Symlinks probably not supported
         get_logger().info('System does not support symlinks. Ignoring them.')
-        symlink_supported = False
     except BaseException:
         # Unexpected exception
         get_logger().exception('Unexpected exception during symlink support check.')
         raise
 
-    with tarfile.open(str(archive_path), 'r|%s' % archive_path.suffix[1:]) as tar_file_obj:
+    with tarfile.open(str(archive_path), f'r|{archive_path.suffix[1:]}') as tar_file_obj:
         tar_file_obj.members = NoAppendList()
         for tarinfo in tar_file_obj:
             try:
@@ -258,21 +259,21 @@ def extract_with_7z(archive_path, output_dir, relative_to, extractors=None):
     if sevenzip_cmd == USE_REGISTRY:
         if not get_running_platform() == PlatformEnum.WINDOWS:
             get_logger().error('"%s" for 7-zip is only available on Windows', sevenzip_cmd)
-            raise Exception()
+            raise EnvironmentError()
         sevenzip_cmd = str(_find_7z_by_registry())
     sevenzip_bin = _find_extractor_by_cmd(sevenzip_cmd)
 
     if not relative_to is None and (output_dir / relative_to).exists():
         get_logger().error('Temporary unpacking directory already exists: %s',
                            output_dir / relative_to)
-        raise Exception()
-    cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', '-o{}'.format(str(output_dir)))
+        raise FileExistsError()
+    cmd = (sevenzip_bin, 'x', str(archive_path), '-aoa', f'-o{str(output_dir)}')
     get_logger().debug('7z command line: %s', ' '.join(cmd))
 
     result = subprocess.run(cmd, check=False)
     if result.returncode != 0:
         get_logger().error('7z command returned %s', result.returncode)
-        raise Exception()
+        raise ChildProcessError()
 
     _process_relative_to(output_dir, relative_to)
 
@@ -296,20 +297,20 @@ def extract_with_winrar(archive_path, output_dir, relative_to, extractors=None):
     if winrar_cmd == USE_REGISTRY:
         if not get_running_platform() == PlatformEnum.WINDOWS:
             get_logger().error('"%s" for WinRAR is only available on Windows', winrar_cmd)
-            raise Exception()
+            raise EnvironmentError()
         winrar_cmd = str(_find_winrar_by_registry())
     winrar_bin = _find_extractor_by_cmd(winrar_cmd)
 
     if not relative_to is None and (output_dir / relative_to).exists():
         get_logger().error('Temporary unpacking directory already exists: %s',
                            output_dir / relative_to)
-        raise Exception()
+        raise FileExistsError()
     cmd = (winrar_bin, 'x', '-o+', str(archive_path), str(output_dir))
     get_logger().debug('WinRAR command line: %s', ' '.join(cmd))
 
     result = subprocess.run(cmd, check=False)
     if result.returncode != 0:
         get_logger().error('WinRAR command returned %s', result.returncode)
-        raise Exception()
+        raise ChildProcessError()
 
     _process_relative_to(output_dir, relative_to)
 
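Swapping the bare `Exception()` raises for built-ins such as `FileExistsError`, `FileNotFoundError`, `ChildProcessError`, and `EnvironmentError` lets callers react per failure mode instead of catching everything. A self-contained sketch of the pattern (names are illustrative, not repo code):

```python
def unpack(returncode, target_exists):
    """Mimics the error paths of the extraction helpers above."""
    if target_exists:
        raise FileExistsError('temporary unpacking directory already exists')
    if returncode != 0:
        raise ChildProcessError(f'extractor returned {returncode}')

try:
    unpack(returncode=2, target_exists=False)
except FileExistsError:
    print('remove the stale directory and retry')
except ChildProcessError as exc:
    print(f'extraction failed: {exc}')
```

Note that `EnvironmentError` has been an alias of `OSError` since Python 3.3, so those raises are catchable as `OSError`.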
@@ -17,7 +17,7 @@ from shutil import copytree, copy, move
 from stat import S_IWRITE
 from subprocess import run
 
-from _common import add_common_params, get_chromium_version, get_logger
+from _common import ENCODING, add_common_params, get_chromium_version, get_logger
 
 # Config file for gclient
 # Instances of 'src' replaced with UC_OUT, which will be replaced with the output directory
@@ -53,7 +53,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
     ucstaging = args.output / 'uc_staging'
     dtpath = ucstaging / 'depot_tools'
     gsuver = '5.30'
-    gsupath = dtpath / 'external_bin' / 'gsutil' / ('gsutil_%s' % gsuver) / 'gsutil'
+    gsupath = dtpath / 'external_bin' / 'gsutil' / f'gsutil_{gsuver}' / 'gsutil'
     gnpath = ucstaging / 'gn'
     environ['GCLIENT_FILE'] = str(ucstaging / '.gclient')
     environ['PATH'] += pathsep + str(dtpath)
@@ -88,7 +88,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
 
     get_logger().info('Cloning depot_tools')
     dt_commit = re.search(r"depot_tools\.git'\s*\+\s*'@'\s*\+\s*'([^']+)',",
-                          Path(args.output / 'DEPS').read_text()).group(1)
+                          Path(args.output / 'DEPS').read_text(encoding=ENCODING)).group(1)
     if not dt_commit:
         get_logger().error('Unable to obtain commit for depot_tools checkout')
         sys.exit(1)
@@ -108,7 +108,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
         (dtpath / 'git.bat').write_text('git')
     # Apply changes to gclient
     run(['git', 'apply', '--ignore-whitespace'],
-        input=Path(__file__).with_name('depot_tools.patch').read_text().replace(
+        input=Path(__file__).with_name('depot_tools.patch').read_text(encoding=ENCODING).replace(
             'UC_OUT', str(args.output)).replace('UC_STAGING',
                                                 str(ucstaging)).replace('GSUVER', gsuver),
         cwd=dtpath,
@@ -123,7 +123,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
     run(['git', 'remote', 'add', 'origin', 'https://github.com/GoogleCloudPlatform/gsutil'],
         cwd=gsupath,
         check=True)
-    run(['git', 'fetch', '--depth=1', 'origin', 'v%s' % gsuver], cwd=gsupath, check=True)
+    run(['git', 'fetch', '--depth=1', 'origin', f'v{gsuver}'], cwd=gsupath, check=True)
     run(['git', 'reset', '--hard', 'FETCH_HEAD'], cwd=gsupath, check=True)
     run(['git', 'clean', '-ffdx'], cwd=gsupath, check=True)
     get_logger().info('Updating gsutil submodules')
@@ -142,7 +142,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
     # gn requires full history to be able to generate last_commit_position.h
     get_logger().info('Cloning gn')
     gn_commit = re.search(r"gn_version': 'git_revision:([^']+)',",
-                          Path(args.output / 'DEPS').read_text()).group(1)
+                          Path(args.output / 'DEPS').read_text(encoding=ENCODING)).group(1)
     if not gn_commit:
         get_logger().error('Unable to obtain commit for gn checkout')
         sys.exit(1)
@@ -226,7 +226,7 @@ def clone(args): # pylint: disable=too-many-branches, too-many-locals, too-many-
     for item in gnpath.iterdir():
         if not item.is_dir():
             copy(item, args.output / 'tools' / 'gn')
-        elif item.name != '.git' and item.name != 'out':
+        elif item.name not in ('.git', 'out'):
             copytree(item, args.output / 'tools' / 'gn' / item.name)
     move(str(gnpath / 'out' / 'last_commit_position.h'),
          str(args.output / 'tools' / 'gn' / 'bootstrap'))
@@ -106,7 +106,7 @@ def _substitute_path(path, regex_iter):
         except UnicodeDecodeError:
             continue
         if not content:
-            raise UnicodeDecodeError('Unable to decode with any encoding: %s' % path)
+            raise UnicodeDecodeError(f'Unable to decode with any encoding: {path}')
         file_subs = 0
         for regex_pair in regex_iter:
             content, sub_count = regex_pair.pattern.subn(regex_pair.replacement, content)
@@ -206,17 +206,17 @@ def apply_substitution(regex_path, files_path, source_tree, domainsub_cache):
     resolved_tree = source_tree.resolve()
     regex_pairs = DomainRegexList(regex_path).regex_pairs
     fileindex_content = io.BytesIO()
-    with tarfile.open(str(domainsub_cache), 'w:%s' % domainsub_cache.suffix[1:],
-                      compresslevel=1) if domainsub_cache else open(os.devnull, 'w') as cache_tar:
+    with tarfile.open(str(domainsub_cache), f'w:{domainsub_cache.suffix[1:]}',
+                      compresslevel=1) if domainsub_cache else open(
+                          os.devnull, 'w', encoding=ENCODING) as cache_tar:
         for relative_path in filter(len, files_path.read_text().splitlines()):
             if _INDEX_HASH_DELIMITER in relative_path:
                 if domainsub_cache:
                     # Cache tar will be incomplete; remove it for convenience
                     cache_tar.close()
                     domainsub_cache.unlink()
-                raise ValueError(
-                    'Path "%s" contains the file index hash delimiter "%s"' % relative_path,
-                    _INDEX_HASH_DELIMITER)
+                raise ValueError(f'Path "{relative_path}" contains '
+                                 f'the file index hash delimiter "{_INDEX_HASH_DELIMITER}"')
             path = resolved_tree / relative_path
             if not path.exists():
                 get_logger().warning('Skipping non-existant path: %s', path)
@@ -230,8 +230,8 @@ def apply_substitution(regex_path, files_path, source_tree, domainsub_cache):
                 get_logger().info('Path has no substitutions: %s', relative_path)
                 continue
             if domainsub_cache:
-                fileindex_content.write('{}{}{:08x}\n'.format(relative_path, _INDEX_HASH_DELIMITER,
-                                                              crc32_hash).encode(ENCODING))
+                fileindex_content.write(
+                    f'{relative_path}{_INDEX_HASH_DELIMITER}{crc32_hash:08x}\n'.encode(ENCODING))
                 orig_tarinfo = tarfile.TarInfo(str(Path(_ORIG_DIR) / relative_path))
                 orig_tarinfo.size = len(orig_content)
                 with io.BytesIO(orig_content) as orig_file:
@@ -25,6 +25,7 @@ from _extraction import extract_tar_file, extract_with_7z, extract_with_winrar
 
 sys.path.insert(0, str(Path(__file__).parent / 'third_party'))
 import schema #pylint: disable=wrong-import-position, wrong-import-order
 
 sys.path.pop(0)
+
 # Constants
@@ -74,6 +75,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
     })
 
     class _DownloadsProperties: #pylint: disable=too-few-public-methods
+
         def __init__(self, section_dict, passthrough_properties, hashes):
             self._section_dict = section_dict
             self._passthrough_properties = passthrough_properties
@@ -97,7 +99,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
                         value = value.split(DownloadInfo.hash_url_delimiter)
                     hashes_dict[hash_name] = value
                 return hashes_dict
-            raise AttributeError('"{}" has no attribute "{}"'.format(type(self).__name__, name))
+            raise AttributeError(f'"{type(self).__name__}" has no attribute "{name}"')
 
     def _parse_data(self, path):
         """
@@ -105,6 +107,7 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
 
         Raises schema.SchemaError if validation fails
         """
+
         def _section_generator(data):
             for section in data:
                 if section == configparser.DEFAULTSECT:
@@ -157,11 +160,12 @@ class DownloadInfo: #pylint: disable=too-few-public-methods
                 return
         for name in section_names:
             if name not in self:
-                raise KeyError('"{}" has no section "{}"'.format(type(self).__name__, name))
+                raise KeyError(f'"{type(self).__name__}" has no section "{name}"')
 
 
 class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
     """Hook for urllib.request.urlretrieve to log progress information to console"""
+
     def __init__(self):
         self._max_len_printed = 0
         self._last_percentage = None
@@ -181,10 +185,10 @@ class _UrlRetrieveReportHook: #pylint: disable=too-few-public-methods
                 return
             self._last_percentage = percentage
             print('\r' + ' ' * self._max_len_printed, end='')
-            status_line = 'Progress: {:.1%} of {:,d} B'.format(percentage, total_size)
+            status_line = f'Progress: {percentage:.1%} of {total_size:,d} B'
         else:
             downloaded_estimate = block_count * block_size
-            status_line = 'Progress: {:,d} B of unknown size'.format(downloaded_estimate)
+            status_line = f'Progress: {downloaded_estimate:,d} B of unknown size'
         self._max_len_printed = len(status_line)
         print('\r' + status_line, end='')
 
@@ -259,7 +263,7 @@ def _get_hash_pairs(download_properties, cache_dir):
             if hash_processor == 'chromium':
                 yield from _chromium_hashes_generator(cache_dir / hash_filename)
             else:
-                raise ValueError('Unknown hash_url processor: %s' % hash_processor)
+                raise ValueError(f'Unknown hash_url processor: {hash_processor}')
         else:
             yield entry_type, entry_value
 
@@ -57,10 +57,10 @@ def _get_archive_writer(output_path, timestamp=None):
     timestamp is a file timestamp to use for all files, if set.
     """
     if not output_path.suffixes:
-        raise ValueError('Output name has no suffix: %s' % output_path.name)
+        raise ValueError(f'Output name has no suffix: {output_path.name}')
     if output_path.suffixes[-1].lower() == '.zip':
         archive_root = Path(output_path.stem)
-        output_archive = zipfile.ZipFile(str(output_path), 'w', zipfile.ZIP_DEFLATED)
+        output_archive = zipfile.ZipFile(str(output_path), 'w', zipfile.ZIP_DEFLATED) # pylint: disable=consider-using-with
         zip_date_time = None
         if timestamp:
             zip_date_time = datetime.datetime.fromtimestamp(timestamp).timetuple()[:6]
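The added `# pylint: disable=consider-using-with` reflects that newer pylint flags any `zipfile.ZipFile(...)` or `tarfile.open(...)` call made outside a `with` block; it is suppressed here because the open handle is deliberately returned to the caller. A sketch of both sides of that rule (file names invented):

import zipfile

# What the checker wants when the archive stays local to the function:
with zipfile.ZipFile('local.zip', 'w', zipfile.ZIP_DEFLATED) as archive:
    archive.writestr('note.txt', 'hello')


def open_archive(name):
    # Ownership of the handle moves to the caller, who must close it,
    # so the warning is suppressed rather than "fixed" with a with-block.
    return zipfile.ZipFile(name, 'w', zipfile.ZIP_DEFLATED)  # pylint: disable=consider-using-with


returned = open_archive('returned.zip')
returned.close()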
@@ -83,17 +83,18 @@ def _get_archive_writer(output_path, timestamp=None):
             zip_write(str(in_path), str(arc_path))
     elif '.tar' in output_path.name.lower():
         if len(output_path.suffixes) >= 2 and output_path.suffixes[-2].lower() == '.tar':
-            tar_mode = 'w:%s' % output_path.suffixes[-1][1:]
+            tar_mode = f'w:{output_path.suffixes[-1][1:]}'
             archive_root = Path(output_path.with_suffix('').stem)
         elif output_path.suffixes[-1].lower() == '.tar':
             tar_mode = 'w'
             archive_root = Path(output_path.stem)
         else:
-            raise ValueError('Could not detect tar format for output: %s' % output_path.name)
+            raise ValueError(f'Could not detect tar format for output: {output_path.name}')
         if timestamp:
+
             class TarInfoFixedTimestamp(tarfile.TarInfo):
                 """TarInfo class with predefined constant mtime"""
 
                 @property
                 def mtime(self):
                     """Return predefined timestamp"""
@@ -106,10 +107,13 @@ def _get_archive_writer(output_path, timestamp=None):
             tarinfo_class = TarInfoFixedTimestamp
         else:
             tarinfo_class = tarfile.TarInfo
-        output_archive = tarfile.open(str(output_path), tar_mode, tarinfo=tarinfo_class)
-        add_func = lambda in_path, arc_path: output_archive.add(str(in_path), str(arc_path))
+        output_archive = tarfile.open(str(output_path), tar_mode, tarinfo=tarinfo_class) # pylint: disable=consider-using-with
+
+        def add_func(in_path, arc_path):
+            """Add files to tar archive"""
+            output_archive.add(str(in_path), str(arc_path))
     else:
-        raise ValueError('Unknown archive extension with name: %s' % output_path.name)
+        raise ValueError(f'Unknown archive extension with name: {output_path.name}')
     return output_archive, add_func, archive_root
 
 
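`TarInfoFixedTimestamp` pins every member's mtime so that two runs over identical inputs yield byte-identical archives, and replacing the `add_func` lambda with a `def` satisfies the updated linters, which discourage assigning lambdas to names. A standalone sketch of the same tarinfo override (archive name, member, and epoch value are illustrative):

import tarfile

FIXED_TIMESTAMP = 946684800  # 2000-01-01 UTC, an arbitrary pinned value


class FixedMtimeTarInfo(tarfile.TarInfo):
    """TarInfo whose mtime is always the pinned constant"""

    @property
    def mtime(self):
        return FIXED_TIMESTAMP

    @mtime.setter
    def mtime(self, value):
        pass  # discard the real file timestamp


# Every member added through this handle records mtime == FIXED_TIMESTAMP
with tarfile.open('out.tar', 'w', tarinfo=FixedMtimeTarInfo) as tar:
    tar.add('/etc/hostname', 'hostname')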
@@ -147,7 +151,7 @@ def _files_generator_by_args(args):
 
 def _list_callback(args):
     """List files needed to run Chromium."""
-    sys.stdout.writelines('%s\n' % x for x in _files_generator_by_args(args))
+    sys.stdout.writelines(f'{x}\n' for x in _files_generator_by_args(args))
 
 
 def _archive_callback(args):
@@ -11,6 +11,7 @@ Generate standalone script that performs the domain substitution.
 from pathlib import Path
 import argparse
 import re
+from _common import ENCODING
 
 
 def make_domain_substitution_script(regex_path, files_path, output_path):
@@ -41,8 +42,8 @@ def make_domain_substitution_script(regex_path, files_path, output_path):
     files_list_str = '\n'.join(files_list)
     perl_replace_list_str = '\n'.join([f' {x};' for x in perl_replace_list])
 
-    with open(output_path, 'w') as out:
-        out.write("""#!/bin/sh -e
+    with open(output_path, 'w', encoding=ENCODING) as out:
+        out.write(f"""#!/bin/sh -e
 #
 # This script performs domain substitution on the Chromium source files.
 #
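`encoding=ENCODING` addresses pylint's `unspecified-encoding` warning: a bare `open(path, 'w')` falls back to the locale's preferred encoding, which can differ between build machines, whereas the generated script should be byte-identical everywhere. A small sketch of the difference (the output filename is invented):

import locale

# What open('domsub.sh', 'w') would silently use on this machine:
print(locale.getpreferredencoding(False))

# Pinning the encoding removes the machine-to-machine variation:
with open('domsub.sh', 'w', encoding='UTF-8') as out:
    out.write('#!/bin/sh -e\n')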
@@ -54,25 +55,25 @@ def make_domain_substitution_script(regex_path, files_path, output_path):
 test -f build/config/compiler/BUILD.gn
 
 # These filenames may contain spaces and/or other unusual characters
-print_file_list() {
+print_file_list() {{
 	cat <<'__END__'
-%s
+{files_list_str}
 __END__
-}
+}}
 
 echo "Creating backup archive ..."
 
 backup=domain-substitution.orig.tar
 print_file_list | tar cf $backup --verbatim-files-from --files-from=-
 
-echo "Applying ungoogled-chromium domain substitution to %d files ..."
+echo "Applying ungoogled-chromium domain substitution to {len(files_list)} files ..."
 
 print_file_list | xargs -d '\\n' perl -0777 -C0 -pwi -e '
-%s
+{perl_replace_list_str}
 '
 
 # end
-""" % (files_list_str, len(files_list), perl_replace_list_str))
+""")
 
 
 def _callback(args):
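Because the heredoc template is now an f-string, every literal brace in the shell code has to be doubled: `{{` and `}}` render as `{` and `}`, while single braces interpolate. A toy demonstration of the same escaping (unrelated to the real template):

count = 3
script = f"""print_file_list() {{
    echo "listing {count} files"
}}
"""
print(script, end='')
# print_file_list() {
#     echo "listing 3 files"
# }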
@@ -60,7 +60,7 @@ def find_and_check_patch(patch_bin_path=None):
         raise ValueError('Could not find patch from PATCH_BIN env var or "which patch"')
 
     if not patch_bin_path.exists():
-        raise ValueError('Could not find the patch binary: {}'.format(patch_bin_path))
+        raise ValueError(f'Could not find the patch binary: {patch_bin_path}')
 
     # Ensure patch actually runs
     cmd = [str(patch_bin_path), '--version']
@@ -73,7 +73,7 @@ def find_and_check_patch(patch_bin_path=None):
         get_logger().error('"%s" returned non-zero exit code', ' '.join(cmd))
         get_logger().error('stdout:\n%s', result.stdout)
         get_logger().error('stderr:\n%s', result.stderr)
-        raise RuntimeError('Got non-zero exit code running "{}"'.format(' '.join(cmd)))
+        raise RuntimeError(f"Got non-zero exit code running \"{' '.join(cmd)}\"")
 
     return patch_bin_path
 
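The probe above runs `patch --version` purely to confirm the binary executes, logging both output streams on failure. A reduced standalone sketch of the same check (not this repo's helper):

import subprocess

cmd = ['patch', '--version']
result = subprocess.run(cmd, capture_output=True, text=True, check=False)
if result.returncode:
    # Mirror the diff: surface both streams, then fail loudly
    print('stdout:\n' + result.stdout)
    print('stderr:\n' + result.stderr)
    raise RuntimeError(f"Got non-zero exit code running \"{' '.join(cmd)}\"")
print(result.stdout.splitlines()[0])  # e.g. "GNU patch 2.7.6"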
@@ -167,18 +167,16 @@ def merge_patches(source_iter, destination, prepend=False):
         if prepend:
             if not (destination / 'series').exists():
                 raise FileNotFoundError(
-                    'Could not find series file in existing destination: {}'.format(destination /
-                                                                                    'series'))
+                    f"Could not find series file in existing destination: {destination / 'series'}")
             known_paths.update(generate_patches_from_series(destination))
         else:
-            raise FileExistsError('destination already exists: {}'.format(destination))
+            raise FileExistsError(f'destination already exists: {destination}')
     for source_dir in source_iter:
         patch_paths = tuple(generate_patches_from_series(source_dir))
         patch_intersection = known_paths.intersection(patch_paths)
         if patch_intersection:
-            raise FileExistsError(
-                'Patches from {} have conflicting paths with other sources: {}'.format(
-                    source_dir, patch_intersection))
+            raise FileExistsError(f'Patches from {source_dir} have conflicting paths '
+                                  f'with other sources: {patch_intersection}')
         series.extend(patch_paths)
         _copy_files(patch_paths, source_dir, destination)
     if prepend and (destination / 'series').exists():