diff --git a/.github/workflows/infra_tests.yml b/.github/workflows/infra_tests.yml
index 28aa56d3365e..98a5b91fbf06 100644
--- a/.github/workflows/infra_tests.yml
+++ b/.github/workflows/infra_tests.yml
@@ -25,13 +25,6 @@ jobs:
         sudo env "PATH=$PATH" pip install -r infra/ci/requirements.txt
         sudo env "PATH=$PATH" pip install -r infra/build/functions/requirements.txt
 
-    - name: Install python-apt
-      run: |
-        git clone https://salsa.debian.org/apt-team/python-apt.git -b 1.6.5ubuntu0.3 --depth 1 /tmp/python-apt
-        cd /tmp/python-apt
-        sudo env "PATH=$PATH" apt build-dep ./
-        sudo env "PATH=$PATH" python setup.py install
-
     - uses: GoogleCloudPlatform/github-actions/setup-gcloud@master
       with:
         version: '298.0.0'
diff --git a/.pylintrc b/.pylintrc
index 3b764856a5cb..02abe5f6c1f8 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -90,6 +90,7 @@ disable=print-statement,
         reduce-builtin,
         standarderror-builtin,
         unicode-builtin,
+        xrange-builtin,
         coerce-method,
         delslice-method,
         getslice-method,
diff --git a/infra/base-images/all.sh b/infra/base-images/all.sh
index f5ac04752809..6d012d5aa1ab 100755
--- a/infra/base-images/all.sh
+++ b/infra/base-images/all.sh
@@ -20,5 +20,3 @@ docker build -t gcr.io/oss-fuzz-base/base-clang "$@" infra/base-images/base-clan
 docker build -t gcr.io/oss-fuzz-base/base-builder -t gcr.io/oss-fuzz/base-libfuzzer "$@" infra/base-images/base-builder
 docker build -t gcr.io/oss-fuzz-base/base-runner "$@" infra/base-images/base-runner
 docker build -t gcr.io/oss-fuzz-base/base-runner-debug "$@" infra/base-images/base-runner-debug
-docker build -t gcr.io/oss-fuzz-base/base-sanitizer-libs-builder "$@" infra/base-images/base-sanitizer-libs-builder
-docker build -t gcr.io/oss-fuzz-base/msan-libs-builder "$@" infra/base-images/msan-libs-builder
diff --git a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py
index 6b4327c10e5c..04aa4207c055 100755
--- a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py
+++ b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 ################################################################################
-"""Wrapper script to call clang or GCC with modified arguments."""
+
 from __future__ import print_function
 import os
 import subprocess
@@ -26,23 +26,23 @@
     '-aux-info',
 ]
 
-M32_BIT_ARGS = [
-    '-m32',
-    '-mx32',
-]
-
-def invoked_as_gcc():
+def InvokedAsGcc():
   """Return whether or not we're pretending to be GCC."""
   return sys.argv[0].endswith('gcc') or sys.argv[0].endswith('g++')
 
 
-def is_32_bit(args):
+def Is32Bit(args):
   """Return whether or not we're 32-bit."""
+  M32_BIT_ARGS = [
+      '-m32',
+      '-mx32',
+  ]
+
   return any(arg in M32_BIT_ARGS for arg in args)
 
 
-def filter_wl_arg(arg):
+def FilterWlArg(arg):
   """Remove -z,defs and equivalents from a single -Wl option."""
   parts = arg.split(',')[1:]
 
@@ -55,7 +55,7 @@ def filter_wl_arg(arg):
     if part == '--no-undefined':
       continue
 
-
+
     filtered.append(part)
 
   if filtered:
@@ -65,29 +65,29 @@ def filter_wl_arg(arg):
   return None
 
 
-def _remove_last_matching(args, find):
-  for i in range(len(args) - 1, -1, -1):
-    if args[i] == find:
-      del args[i]
+def _RemoveLastMatching(l, find):
+  for i in xrange(len(l) - 1, -1, -1):
+    if l[i] == find:
+      del l[i]
       return
 
   raise IndexError('Not found')
 
 
-def remove_z_defs(args):
+def RemoveZDefs(args):
  """Remove unsupported -Wl,-z,defs linker option."""
  filtered = []

  for arg in args:
    if arg == '-Wl,defs':
-      _remove_last_matching(filtered, '-Wl,-z')
+      _RemoveLastMatching(filtered, '-Wl,-z')
      continue

    if arg == '-Wl,--no-undefined':
      continue

    if arg.startswith('-Wl,'):
-      arg = filter_wl_arg(arg)
+      arg = FilterWlArg(arg)
      if not arg:
        continue

@@ -96,11 +96,11 @@ def remove_z_defs(args):
   return filtered
 
 
-def get_compiler_args(args, is_cxx):
+def GetCompilerArgs(args, is_cxx):
   """Generate compiler args."""
   compiler_args = args[1:]
 
-  if is_32_bit(args):
+  if Is32Bit(args):
     # 32 bit builds not supported.
     compiler_args.extend([
         '-fno-sanitize=memory',
@@ -109,7 +109,7 @@ def get_compiler_args(args, is_cxx):
 
     return compiler_args
 
-  compiler_args = remove_z_defs(compiler_args)
+  compiler_args = RemoveZDefs(compiler_args)
   compiler_args.extend([
       # FORTIFY_SOURCE is not supported by sanitizers.
       '-U_FORTIFY_SOURCE',
@@ -122,15 +122,15 @@ def get_compiler_args(args, is_cxx):
       '-fno-lto',
   ])
 
-  if invoked_as_gcc():
+  if InvokedAsGcc():
     compiler_args.extend([
-        # For better compatibility with flags passed via -Wa,...
-        '-fno-integrated-as',
+      # For better compatibility with flags passed via -Wa,...
+      '-fno-integrated-as',
     ])
 
   if '-fsanitize=memory' not in args:
     # If MSan flags weren't added for some reason, add them here.
-    compiler_args.extend(msan_build.get_injected_flags())
+    compiler_args.extend(msan_build.GetInjectedFlags())
 
   if is_cxx:
     compiler_args.append('-stdlib=libc++')
@@ -138,36 +138,35 @@ def get_compiler_args(args, is_cxx):
   return compiler_args
 
 
-def find_real_clang():
+def FindRealClang():
   """Return path to real clang."""
   return os.environ['REAL_CLANG_PATH']
 
 
-def fallback_to_gcc(args):
+def FallbackToGcc(args):
   """Check whether if we should fall back to GCC."""
-  if not invoked_as_gcc():
+  if not InvokedAsGcc():
     return False
 
   return any(arg in GCC_ONLY_ARGS for arg in args[1:])
 
 
 def main(args):
-  """Modify arguments and call the real compiler."""
-  if fallback_to_gcc(args):
-    sys.exit(
-        subprocess.call(['/usr/bin/' + os.path.basename(args[0])] + args[1:]))
+  if FallbackToGcc(args):
+    sys.exit(subprocess.call(['/usr/bin/' + os.path.basename(args[0])] +
+                             args[1:]))
 
   is_cxx = args[0].endswith('++')
-  real_clang = find_real_clang()
+  real_clang = FindRealClang()
 
   if is_cxx:
     real_clang += '++'
 
-  args = [real_clang] + get_compiler_args(args, is_cxx)
+  args = [real_clang] + GetCompilerArgs(args, is_cxx)
   debug_log_path = os.getenv('WRAPPER_DEBUG_LOG_PATH')
 
   if debug_log_path:
-    with open(debug_log_path, 'a') as log_file:
-      log_file.write(str(args) + '\n')
+    with open(debug_log_path, 'a') as f:
+      f.write(str(args) + '\n')
 
   sys.exit(subprocess.call(args))
diff --git a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py
index c97ecfe76a34..a05592d38bce 100644
--- a/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py
+++ b/infra/base-images/base-sanitizer-libs-builder/compiler_wrapper_test.py
@@ -1,19 +1,3 @@
-#!/usr/bin/env python3
-# Copyright 2020 Google LLC.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-################################################################################
 """Tests for compiler_wrapper."""
 from __future__ import print_function
 
@@ -24,41 +8,35 @@
 
 
 class CompilerWrapperTest(unittest.TestCase):
-  """Tests for compiler_wrapper."""
 
-  def test_filter_z_defs(self):
-    """Reference tests for remove_z_defs."""
-    self.assertListEqual(['arg'],
-                         compiler_wrapper.remove_z_defs(['arg', '-Wl,-z,defs']))
+  def testFilterZDefs(self):
+    self.assertListEqual(
+        ['arg'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,defs']))
 
-    self.assertListEqual(['arg'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', '-Wl,--no-undefined']))
+    self.assertListEqual(
+        ['arg'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,--no-undefined']))
 
-    self.assertListEqual(['arg', '-Wl,-z,relro'],
-                         compiler_wrapper.remove_z_defs(['arg',
-                                                         '-Wl,-z,relro']))
+    self.assertListEqual(
+        ['arg', '-Wl,-z,relro'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,relro']))
 
-    self.assertListEqual(['arg', '-Wl,-soname,lib.so.1,-z,relro'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', '-Wl,-soname,lib.so.1,-z,defs,-z,relro']))
+    self.assertListEqual(
+        ['arg', '-Wl,-soname,lib.so.1,-z,relro'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-soname,lib.so.1,-z,defs,-z,relro']))
 
-    self.assertListEqual(['arg', '-Wl,-z,relro'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', '-Wl,-z,relro,-z,defs']))
+    self.assertListEqual(
+        ['arg', '-Wl,-z,relro'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z,relro,-z,defs']))
 
-    self.assertListEqual(['arg'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', '-Wl,-z', '-Wl,defs']))
-
-    self.assertListEqual(['arg', 'arg2'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', 'arg2', '-Wl,--no-undefined']))
-
-    self.assertListEqual(['arg', 'arg2'],
-                         compiler_wrapper.remove_z_defs(
-                             ['arg', '-Wl,-z', 'arg2', '-Wl,defs']))
+    self.assertListEqual(
+        ['arg'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z', '-Wl,defs']))
 
+    self.assertListEqual(
+        ['arg', 'arg2'],
+        compiler_wrapper.RemoveZDefs(['arg', '-Wl,-z', 'arg2', '-Wl,--no-undefined']))
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/infra/base-images/base-sanitizer-libs-builder/msan_build.py b/infra/base-images/base-sanitizer-libs-builder/msan_build.py
index 7dd0d82233a5..928b1a596e14 100755
--- a/infra/base-images/base-sanitizer-libs-builder/msan_build.py
+++ b/infra/base-images/base-sanitizer-libs-builder/msan_build.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 ################################################################################
-"""Script to build libraries with MemorySanitizer instrumentation."""
+
 from __future__ import print_function
 import argparse
 import imp
@@ -43,30 +43,12 @@
     '-fno-omit-frame-pointer',
 ]
 
-C_OR_CXX_DEPS = [
-    'libc++1',
-    'libc6',
-    'libc++abi1',
-    'libgcc1',
-    'libstdc++6',
-]
-
-BLACKLISTED_PACKAGES = [
-    'libcapnp-0.5.3',  # fails to compile on newer clang.
-    'libllvm5.0',
-    'libmircore1',
-    'libmircommon7',
-    'libmirclient9',
-    'libmirprotobuf3',
-    'multiarch-support',
-]
-
 
 class MSanBuildException(Exception):
   """Base exception."""
 
 
-def get_track_origins_flag():
+def GetTrackOriginsFlag():
   """Get the track origins flag."""
   if os.getenv('MSAN_NO_TRACK_ORIGINS'):
     return TRACK_ORIGINS_ARG + '0'
@@ -74,12 +56,11 @@ def get_track_origins_flag():
   return TRACK_ORIGINS_ARG + '2'
 
 
-def get_injected_flags():
-  """Get the additional command line arguments."""
-  return INJECTED_ARGS + [get_track_origins_flag()]
+def GetInjectedFlags():
+  return INJECTED_ARGS + [GetTrackOriginsFlag()]
 
 
-def set_up_environment(work_dir):
+def SetUpEnvironment(work_dir):
   """Set up build environment."""
   env = {}
   env['REAL_CLANG_PATH'] = subprocess.check_output(['which', 'clang']).strip()
@@ -90,11 +71,9 @@ def set_up_environment(work_dir):
   bin_dir = os.path.join(work_dir, 'bin')
   os.mkdir(bin_dir)
 
-  dpkg_host_architecture = wrapper_utils.dpkg_host_architecture()
-  wrapper_utils.create_symlinks(
-      compiler_wrapper_path,
-      bin_dir,
-      [
+  dpkg_host_architecture = wrapper_utils.DpkgHostArchitecture()
+  wrapper_utils.CreateSymlinks(
+      compiler_wrapper_path, bin_dir, [
           'clang',
           'clang++',
           # Not all build rules respect $CC/$CXX, so make additional symlinks.
@@ -109,49 +88,54 @@ def set_up_environment(work_dir):
   env['CC'] = os.path.join(bin_dir, 'clang')
   env['CXX'] = os.path.join(bin_dir, 'clang++')
 
-  msan_options = ' '.join(get_injected_flags())
+  MSAN_OPTIONS = ' '.join(GetInjectedFlags())
 
   # We don't use nostrip because some build rules incorrectly break when it is
   # passed. Instead we install our own no-op strip binaries.
   env['DEB_BUILD_OPTIONS'] = ('nocheck parallel=%d' %
                               multiprocessing.cpu_count())
-  env['DEB_CFLAGS_APPEND'] = msan_options
-  env['DEB_CXXFLAGS_APPEND'] = msan_options + ' -stdlib=libc++'
-  env['DEB_CPPFLAGS_APPEND'] = msan_options
-  env['DEB_LDFLAGS_APPEND'] = msan_options
+  env['DEB_CFLAGS_APPEND'] = MSAN_OPTIONS
+  env['DEB_CXXFLAGS_APPEND'] = MSAN_OPTIONS + ' -stdlib=libc++'
+  env['DEB_CPPFLAGS_APPEND'] = MSAN_OPTIONS
+  env['DEB_LDFLAGS_APPEND'] = MSAN_OPTIONS
   env['DPKG_GENSYMBOLS_CHECK_LEVEL'] = '0'
 
   # debian/rules can set DPKG_GENSYMBOLS_CHECK_LEVEL explicitly, so override it.
-  gen_symbols_wrapper = ('#!/bin/sh\n'
-                         'export DPKG_GENSYMBOLS_CHECK_LEVEL=0\n'
-                         '/usr/bin/dpkg-gensymbols "$@"\n')
+  gen_symbols_wrapper = (
+      '#!/bin/sh\n'
+      'export DPKG_GENSYMBOLS_CHECK_LEVEL=0\n'
+      '/usr/bin/dpkg-gensymbols "$@"\n')
 
-  wrapper_utils.install_wrapper(bin_dir, 'dpkg-gensymbols', gen_symbols_wrapper)
+  wrapper_utils.InstallWrapper(bin_dir, 'dpkg-gensymbols',
+                               gen_symbols_wrapper)
 
   # Install no-op strip binaries.
-  no_op_strip = ('#!/bin/sh\n' \
+  no_op_strip = ('#!/bin/sh\n'
                  'exit 0\n')
-  wrapper_utils.install_wrapper(bin_dir, 'strip', no_op_strip,
-                                [dpkg_host_architecture + '-strip'])
+  wrapper_utils.InstallWrapper(
+      bin_dir, 'strip', no_op_strip,
+      [dpkg_host_architecture + '-strip'])
 
   env['PATH'] = bin_dir + ':' + os.environ['PATH']
 
   # nocheck doesn't disable override_dh_auto_test. So we have this hack to try
   # to disable "make check" or "make test" invocations.
-  make_wrapper = ('#!/bin/bash\n'
-                  'if [ "$1" = "test" ] || [ "$1" = "check" ]; then\n'
-                  '  exit 0\n'
-                  'fi\n'
-                  '/usr/bin/make "$@"\n')
-  wrapper_utils.install_wrapper(bin_dir, 'make', make_wrapper)
+  make_wrapper = (
+      '#!/bin/bash\n'
+      'if [ "$1" = "test" ] || [ "$1" = "check" ]; then\n'
+      '  exit 0\n'
+      'fi\n'
+      '/usr/bin/make "$@"\n')
+  wrapper_utils.InstallWrapper(bin_dir, 'make',
+                               make_wrapper)
 
   # Prevent entire build from failing because of bugs/uninstrumented in tools
   # that are part of the build.
   msan_log_dir = os.path.join(work_dir, 'msan')
   os.mkdir(msan_log_dir)
   msan_log_path = os.path.join(msan_log_dir, 'log')
-  env['MSAN_OPTIONS'] = ('halt_on_error=0:exitcode=0:report_umrs=0:log_path=' +
-                         msan_log_path)
+  env['MSAN_OPTIONS'] = (
+      'halt_on_error=0:exitcode=0:report_umrs=0:log_path=' + msan_log_path)
 
   # Increase maximum stack size to prevent tests from failing.
   limit = 128 * 1024 * 1024
@@ -159,7 +143,7 @@ def set_up_environment(work_dir):
   return env
 
 
-def find_package_debs(package_name, work_directory):
+def FindPackageDebs(package_name, work_directory):
   """Find package debs."""
   deb_paths = []
   cache = apt.Cache()
@@ -192,7 +176,7 @@ def find_package_debs(package_name, work_directory):
   return deb_paths
 
 
-def extract_libraries(deb_paths, work_directory, output_directory):
+def ExtractLibraries(deb_paths, work_directory, output_directory):
   """Extract libraries from .deb packages."""
   extract_directory = os.path.join(work_directory, 'extracted')
   if os.path.exists(extract_directory):
@@ -203,6 +187,7 @@ def extract_libraries(deb_paths, work_directory, output_directory):
   for deb_path in deb_paths:
     subprocess.check_call(['dpkg-deb', '-x', deb_path, extract_directory])
 
+  extracted = []
   for root, _, filenames in os.walk(extract_directory):
     if 'libx32' in root or 'lib32' in root:
       continue
@@ -221,8 +206,8 @@ def extract_libraries(deb_paths, work_directory, output_directory):
         os.makedirs(target_dir)
 
       target_file_path = os.path.join(output_directory, rel_file_path)
-      yield target_file_path
-
+      extracted.append(target_file_path)
+
       if os.path.lexists(target_file_path):
         os.remove(target_file_path)
 
@@ -230,16 +215,17 @@ def extract_libraries(deb_paths, work_directory, output_directory):
        link_path = os.readlink(file_path)
        if os.path.isabs(link_path):
          # Make absolute links relative.
-          link_path = os.path.relpath(link_path,
-                                      os.path.join('/', rel_directory))
+          link_path = os.path.relpath(
+              link_path, os.path.join('/', rel_directory))
 
         os.symlink(link_path, target_file_path)
       else:
         shutil.copy2(file_path, target_file_path)
 
+  return extracted
+
 
-def get_package(package_name):
-  """Factory for Package objects."""
+def GetPackage(package_name):
   apt_cache = apt.Cache()
   version = apt_cache[package_name].candidate
   source_name = version.source_name
@@ -255,11 +241,11 @@ def get_package(package_name):
   return module.Package(version)
 
 
-def patch_rpath(path, output_directory):
+def PatchRpath(path, output_directory):
   """Patch rpath to be relative to $ORIGIN."""
   try:
-    rpaths = subprocess.check_output(['patchelf', '--print-rpath',
-                                      path]).strip()
+    rpaths = subprocess.check_output(
+        ['patchelf', '--print-rpath', path]).strip()
   except subprocess.CalledProcessError:
     return
 
@@ -276,17 +262,37 @@ def patch_rpath(path, output_directory):
       processed_rpath.append(rpath)
       continue
 
-    processed_rpath.append(
-        os.path.join('$ORIGIN', os.path.relpath(rpath, rel_directory)))
+    processed_rpath.append(os.path.join(
+        '$ORIGIN',
+        os.path.relpath(rpath, rel_directory)))
 
   processed_rpath = ':'.join(processed_rpath)
   print('Patching rpath for', path, 'to', processed_rpath)
   subprocess.check_call(
-      ['patchelf', '--force-rpath', '--set-rpath', processed_rpath, path])
+      ['patchelf', '--force-rpath', '--set-rpath',
+       processed_rpath, path])
 
 
-def _collect_dependencies(apt_cache, pkg, cache, dependencies):
+def _CollectDependencies(apt_cache, pkg, cache, dependencies):
   """Collect dependencies that need to be built."""
+  C_OR_CXX_DEPS = [
+      'libc++1',
+      'libc6',
+      'libc++abi1',
+      'libgcc1',
+      'libstdc++6',
+  ]
+
+  BLACKLISTED_PACKAGES = [
+      'libcapnp-0.5.3',  # fails to compile on newer clang.
+      'libllvm5.0',
+      'libmircore1',
+      'libmircommon7',
+      'libmirclient9',
+      'libmirprotobuf3',
+      'multiarch-support',
+  ]
+
   if pkg.name in BLACKLISTED_PACKAGES:
     return False
 
@@ -303,9 +309,8 @@ def _collect_dependencies(apt_cache, pkg, cache, dependencies):
       if dependency.name in cache:
         is_c_or_cxx |= cache[dependency.name]
       else:
-        is_c_or_cxx |= _collect_dependencies(apt_cache,
-                                             apt_cache[dependency.name], cache,
-                                             dependencies)
+        is_c_or_cxx |= _CollectDependencies(apt_cache, apt_cache[dependency.name],
+                                            cache, dependencies)
 
   if is_c_or_cxx:
     dependencies.append(pkg.name)
@@ -313,24 +318,20 @@ def _collect_dependencies(apt_cache, pkg, cache, dependencies):
   return is_c_or_cxx
 
 
-def get_build_list(package_name):
+def GetBuildList(package_name):
   """Get list of packages that need to be built including dependencies."""
   apt_cache = apt.Cache()
   pkg = apt_cache[package_name]
 
   dependencies = []
-  _collect_dependencies(apt_cache, pkg, {}, dependencies)
+  _CollectDependencies(apt_cache, pkg, {}, dependencies)
   return dependencies
 
 
-class MSanBuilder:
+class MSanBuilder(object):
   """MSan builder."""
 
-  def __init__(self,
-               debug=False,
-               log_path=None,
-               work_dir=None,
-               no_track_origins=False):
+  def __init__(self, debug=False, log_path=None, work_dir=None, no_track_origins=False):
     self.debug = debug
     self.log_path = log_path
     self.work_dir = work_dir
@@ -345,7 +346,7 @@ def __enter__(self):
       shutil.rmtree(self.work_dir, ignore_errors=True)
 
     os.makedirs(self.work_dir)
-    self.env = set_up_environment(self.work_dir)
+    self.env = SetUpEnvironment(self.work_dir)
 
     if self.debug and self.log_path:
       self.env['WRAPPER_DEBUG_LOG_PATH'] = self.log_path
@@ -359,16 +360,16 @@ def __exit__(self, exc_type, exc_value, traceback):
     if not self.debug:
       shutil.rmtree(self.work_dir, ignore_errors=True)
 
-  def build(self, package_name, output_directory, create_subdirs=False):
+  def Build(self, package_name, output_directory, create_subdirs=False):
     """Build the package and write results into the output directory."""
-    deb_paths = find_package_debs(package_name, self.work_dir)
+    deb_paths = FindPackageDebs(package_name, self.work_dir)
     if deb_paths:
       print('Source package already built for', package_name)
     else:
-      pkg = get_package(package_name)
+      pkg = GetPackage(package_name)
 
-      pkg.install_build_deps()
-      source_directory = pkg.download_source(self.work_dir)
+      pkg.InstallBuildDeps()
+      source_directory = pkg.DownloadSource(self.work_dir)
       print('Source downloaded to', source_directory)
 
       # custom bin directory for custom build scripts to write wrappers.
@@ -377,10 +378,10 @@ def build(self, package_name, output_directory, create_subdirs=False):
       env = self.env.copy()
       env['PATH'] = custom_bin_dir + ':' + env['PATH']
 
-      pkg.build(source_directory, env, custom_bin_dir)
+      pkg.Build(source_directory, env, custom_bin_dir)
       shutil.rmtree(custom_bin_dir, ignore_errors=True)
 
-      deb_paths = find_package_debs(package_name, self.work_dir)
+      deb_paths = FindPackageDebs(package_name, self.work_dir)
 
     if not deb_paths:
       raise MSanBuildException('Failed to find .deb packages.')
@@ -392,25 +393,22 @@ def build(self, package_name, output_directory, create_subdirs=False):
     else:
       extract_directory = output_directory
 
-    extracted_paths = list(
-        extract_libraries(deb_paths, self.work_dir, extract_directory))
+    extracted_paths = ExtractLibraries(deb_paths, self.work_dir,
+                                       extract_directory)
     for extracted_path in extracted_paths:
       if not os.path.islink(extracted_path):
-        patch_rpath(extracted_path, extract_directory)
+        PatchRpath(extracted_path, extract_directory)
 
 
 def main():
-  """Builds packages with MemorySanitizer instrumentation."""
   parser = argparse.ArgumentParser('msan_build.py', description='MSan builder.')
   parser.add_argument('package_names', nargs='+', help='Name of the packages.')
   parser.add_argument('output_dir', help='Output directory.')
-  parser.add_argument('--create-subdirs',
-                      action='store_true',
+  parser.add_argument('--create-subdirs', action='store_true',
                       help=('Create subdirectories in the output '
                             'directory for each package.'))
   parser.add_argument('--work-dir', help='Work directory.')
-  parser.add_argument('--no-build-deps',
-                      action='store_true',
+  parser.add_argument('--no-build-deps', action='store_true',
                       help='Don\'t build dependencies.')
   parser.add_argument('--debug', action='store_true', help='Enable debug mode.')
   parser.add_argument('--log-path', help='Log path for debugging.')
@@ -433,7 +431,7 @@ def main():
 
   # Get list of packages to build, including all dependencies.
   for package_name in args.package_names:
-    for dep in get_build_list(package_name):
+    for dep in GetBuildList(package_name):
       if dep in all_packages:
         continue
 
@@ -447,12 +445,11 @@ def main():
   for package_name in package_names:
     print('\t', package_name)
 
-  with MSanBuilder(debug=args.debug,
-                   log_path=args.log_path,
+  with MSanBuilder(debug=args.debug, log_path=args.log_path,
                    work_dir=args.work_dir,
                    no_track_origins=args.no_track_origins) as builder:
     for package_name in package_names:
-      builder.build(package_name, args.output_dir, args.create_subdirs)
+      builder.Build(package_name, args.output_dir, args.create_subdirs)
 
 
 if __name__ == '__main__':
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py b/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py
index 20d1b9df1645..8071b7ecd4f7 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/boost1_58.py
@@ -14,17 +14,16 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for boost1.58."""
+
 import package
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """boost1.58 package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('boost1.58', apt_version)
 
-  def pre_build(self, _source_directory, env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for boost1.58."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     # Otherwise py_nonblocking.cpp fails to build.
     env['DEB_CXXFLAGS_APPEND'] += ' -std=c++98'
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py b/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py
index efe8d34b1c66..f8407a66876b 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/gnutls28.py
@@ -14,22 +14,24 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for gnutls28."""
+
+import os
+import shutil
+
 import package
 import wrapper_utils
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """gnutls28 package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('gnutls28', apt_version)
 
-  def pre_build(self, _source_directory, _env, custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for gnutls28."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     configure_wrapper = (
         '#!/bin/bash\n'
         '/usr/bin/dh_auto_configure "$@" --disable-hardware-acceleration')
 
-    wrapper_utils.install_wrapper(custom_bin_dir, 'dh_auto_configure',
-                                  configure_wrapper)
+    wrapper_utils.InstallWrapper(
+        custom_bin_dir, 'dh_auto_configure', configure_wrapper)
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py b/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py
index 948240ae37f0..9d200af6f62b 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/libgcrypt20.py
@@ -14,21 +14,24 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for libgcrypt20."""
+
+import os
+import shutil
+
 import package
 import wrapper_utils
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """libgcrypt20 package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('libgcrypt20', apt_version)
 
-  def pre_build(self, _source_directory, _env, custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for libgcrypt20."""
-    configure_wrapper = ('#!/bin/bash\n'
-                         '/usr/bin/dh_auto_configure "$@" --disable-asm')
+  def PreBuild(self, source_directory, env, custom_bin_dir):
+    configure_wrapper = (
+        '#!/bin/bash\n'
+        '/usr/bin/dh_auto_configure "$@" --disable-asm')
 
-    wrapper_utils.install_wrapper(custom_bin_dir, 'dh_auto_configure',
-                                  configure_wrapper)
+    wrapper_utils.InstallWrapper(
+        custom_bin_dir, 'dh_auto_configure', configure_wrapper)
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py b/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py
index 9b3d6e3a18da..ec2e9d217ef4 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/mesa.py
@@ -14,16 +14,15 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for mesa."""
+
 import package
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """mesa package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('mesa', apt_version)
 
-  def pre_build(self, _source_directory, env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for mesa."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     env['DEB_CXXFLAGS_APPEND'] += ' -std=c++11'
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py b/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py
index 26756579be56..e1b0e2f81346 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/nettle.py
@@ -14,28 +14,28 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom configure options for nettle."""
+
 import os
 import shutil
 
 import package
 
 
-def add_no_asm_arg(config_path):
+def AddNoAsmArg(config_path):
   """Add --disable-assembler to config scripts."""
   shutil.move(config_path, config_path + '.real')
-  with open(config_path, 'w') as config_file:
-    config_file.write('#!/bin/sh\n'
-                      '%s.real --disable-assembler "$@"\n' % config_path)
-  os.chmod(config_path, 0o755)
+  with open(config_path, 'w') as f:
+    f.write(
+        '#!/bin/sh\n'
+        '%s.real --disable-assembler "$@"\n' % config_path)
+  os.chmod(config_path, 0755)
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """nettle package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('nettle', apt_version)
 
-  def pre_build(self, source_directory, _env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Hook function to customize nettle's configuration before building."""
-    add_no_asm_arg(os.path.join(source_directory, 'configure'))
+  def PreBuild(self, source_directory, env, custom_bin_dir):
+    AddNoAsmArg(os.path.join(source_directory, 'configure'))
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py b/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py
index 0fb311ff84c0..e24ccc58813d 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/openssl.py
@@ -14,29 +14,29 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom configure options for openssl."""
+
 import os
 import shutil
 
 import package
 
 
-def add_no_asm_arg(config_path):
+def AddNoAsmArg(config_path):
   """Add --no-asm to config scripts."""
   shutil.move(config_path, config_path + '.real')
-  with open(config_path, 'w') as config_file:
-    config_file.write('#!/bin/sh\n' \
-                      '%s.real no-asm "$@"\n' % config_path)
-  os.chmod(config_path, 0o755)
+  with open(config_path, 'w') as f:
+    f.write(
+        '#!/bin/sh\n'
+        '%s.real no-asm "$@"\n' % config_path)
+  os.chmod(config_path, 0755)
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """openssl package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('openssl', apt_version)
 
-  def pre_build(self, source_directory, _env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Hook function to customize openssl's configuration before building."""
-    add_no_asm_arg(os.path.join(source_directory, 'Configure'))
-    add_no_asm_arg(os.path.join(source_directory, 'config'))
+  def PreBuild(self, source_directory, env, custom_bin_dir):
+    AddNoAsmArg(os.path.join(source_directory, 'Configure'))
+    AddNoAsmArg(os.path.join(source_directory, 'config'))
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/package.py b/infra/base-images/base-sanitizer-libs-builder/packages/package.py
index a8d40e17bc9a..059c23587051 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/package.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/package.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 ################################################################################
-"""Base class and utility functions for all libraries that require customized build processes."""
+
 import os
 import subprocess
 
@@ -23,62 +23,60 @@
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 
 
-def apply_patch(source_directory, patch_name):
+def ApplyPatch(source_directory, patch_name):
   """Apply custom patch."""
-  subprocess.check_call(
-      ['patch', '-p1', '-i',
-       os.path.join(SCRIPT_DIR, patch_name)],
-      cwd=source_directory)
+  subprocess.check_call(['patch', '-p1', '-i',
+                         os.path.join(SCRIPT_DIR, patch_name)],
+                        cwd=source_directory)
 
 
 class PackageException(Exception):
   """Base package exception."""
 
 
-class Package:
+class Package(object):
   """Base package."""
 
   def __init__(self, name, apt_version):
     self.name = name
     self.apt_version = apt_version
 
-  def pre_build(self, _source_directory, _env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Default no-op pre-build hook function."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     return
 
-  def post_build(self, _source_directory, _env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Default no-op post-build hook function."""
+  def PostBuild(self, source_directory, env, custom_bin_dir):
    return

-  def pre_download(self, _download_directory):  # pylint: disable=no-self-use
-    """Default no-op pre-download hook function."""
+  def PreDownload(self, download_directory):
    return

-  def post_download(self, _source_directory):  # pylint: disable=no-self-use
-    """Default no-op post-download hook function."""
+  def PostDownload(self, source_directory):
    return

-  def install_build_deps(self):
+  def InstallBuildDeps(self):
     """Install build dependencies for a package."""
     subprocess.check_call(['apt-get', 'update'])
     subprocess.check_call(['apt-get', 'build-dep', '-y', self.name])
 
     # Reload package after update.
-    self.apt_version = apt.Cache()[self.apt_version.package.name].candidate
+    self.apt_version = (
+        apt.Cache()[self.apt_version.package.name].candidate)
 
-  def download_source(self, download_directory):
+  def DownloadSource(self, download_directory):
     """Download the source for a package."""
-    self.pre_download(download_directory)
+    self.PreDownload(download_directory)
     source_directory = self.apt_version.fetch_source(download_directory)
-    self.post_download(source_directory)
+    self.PostDownload(source_directory)
     return source_directory
 
-  def build(self, source_directory, env, custom_bin_dir):
+  def Build(self, source_directory, env, custom_bin_dir):
     """Build .deb packages."""
-    self.pre_build(source_directory, env, custom_bin_dir)
-    subprocess.check_call(['dpkg-buildpackage', '-us', '-uc', '-B'],
-                          cwd=source_directory,
-                          env=env)
-    self.post_build(source_directory, env, custom_bin_dir)
+    self.PreBuild(source_directory, env, custom_bin_dir)
+    subprocess.check_call(
+        ['dpkg-buildpackage', '-us', '-uc', '-B'],
+        cwd=source_directory, env=env)
+    self.PostBuild(source_directory, env, custom_bin_dir)
+
+
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py b/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py
index e693298ed590..d63b1468f7b0 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/pixman.py
@@ -14,8 +14,9 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for pixman."""
+
 import os
+import shutil
 
 import package
 
@@ -26,15 +27,16 @@ class Package(package.Package):
 
   def __init__(self, apt_version):
     super(Package, self).__init__('pixman', apt_version)
 
-  def post_download(self, source_directory):  # pylint: disable=no-self-use
-    """Workaround for incorrect checking of GCC vector extension availability."""
-    os.system('sed s/support_for_gcc_vector_extensions=yes/'
-              'support_for_gcc_vector_extensions=no/ -i %s/configure.ac' %
-              source_directory)
+  def PostDownload(self, source_directory):
+    # Incorrect checking of GCC vector extension availability.
+    os.system(
+        'sed s/support_for_gcc_vector_extensions=yes/'
+        'support_for_gcc_vector_extensions=no/ -i %s/configure.ac' %
+        source_directory)
 
-  def pre_build(self, _source_directory, env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for pixman."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     blacklist_flag = ' -fsanitize-blacklist=' + os.path.join(
-        os.path.dirname(os.path.abspath(__file__)), 'pixman_blacklist.txt')
+        os.path.dirname(os.path.abspath(__file__)),
+        'pixman_blacklist.txt')
 
     env['DEB_CXXFLAGS_APPEND'] += blacklist_flag
     env['DEB_CFLAGS_APPEND'] += blacklist_flag
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py b/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py
index c4fd9f4a6b26..853b9e7240bf 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/pulseaudio.py
@@ -14,20 +14,22 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for PulseAudio."""
+
 from __future__ import print_function
+import glob
 import os
+import subprocess
 
 import package
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """PulseAudio package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('pulseaudio', apt_version)
 
-  def post_download(self, source_directory):  # pylint: disable=no-self-use
+  def PostDownload(self, source_directory):
     """Remove blacklisted patches."""
     # Fix *droid* patches.
     bad_patch_path = os.path.join(
@@ -37,4 +39,4 @@ def post_download(self, source_directory):  # pylint: disable=no-self-use
       return
 
     print('Applying custom patches.')
-    package.apply_patch(source_directory, 'pulseaudio_fix_android.patch')
+    package.ApplyPatch(source_directory, 'pulseaudio_fix_android.patch')
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py b/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py
index caed9c4d11f5..3e1a1070fa69 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/sqlite3.py
@@ -14,19 +14,19 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for sqlite3."""
+
 import os
 
 import package
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """sqlite3 package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('sqlite3', apt_version)
 
-  def pre_build(self, source_directory, _env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for sqlite3."""
-    os.system('sed -i "s/package ifneeded sqlite3//" %s/debian/rules' %
-              source_directory)
+  def PreBuild(self, source_directory, env, custom_bin_dir):
+    os.system(
+        'sed -i "s/package ifneeded sqlite3//" %s/debian/rules' %
+        source_directory)
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py b/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py
index 48922d8badda..5cb6d60bedc5 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/systemd.py
@@ -14,27 +14,29 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for systemd."""
+
 from __future__ import print_function
+import glob
+import os
+import subprocess
 
 import package
 import wrapper_utils
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """systemd package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('systemd', apt_version)
 
-  def pre_build(self, _source_directory, _env, custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build installation of a wrapper script for systemd."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     # Hide msan symbols from nm. the systemd build system uses this to find
     # undefined symbols and errors out if it does.
-    nm_wrapper = ('#!/bin/bash\n'
-                  '/usr/bin/nm "$@" | grep -E -v "U (__msan|memset)"\n'
-                  'exit ${PIPESTATUS[0]}\n')
+    nm_wrapper = (
+        '#!/bin/bash\n'
+        '/usr/bin/nm "$@" | grep -E -v "U (__msan|memset)"\n'
+        'exit ${PIPESTATUS[0]}\n')
 
-    wrapper_utils.install_wrapper(
-        custom_bin_dir, 'nm', nm_wrapper,
-        [wrapper_utils.dpkg_host_architecture() + '-nm'])
+    wrapper_utils.InstallWrapper(custom_bin_dir, 'nm', nm_wrapper,
+                                 [wrapper_utils.DpkgHostArchitecture() + '-nm'])
diff --git a/infra/base-images/base-sanitizer-libs-builder/packages/tar.py b/infra/base-images/base-sanitizer-libs-builder/packages/tar.py
index 6aab2825eb23..74abd5c72030 100644
--- a/infra/base-images/base-sanitizer-libs-builder/packages/tar.py
+++ b/infra/base-images/base-sanitizer-libs-builder/packages/tar.py
@@ -14,16 +14,15 @@
 # limitations under the License.
 #
 ################################################################################
-"""Custom options for tar."""
+
 import package
 
 
-class Package(package.Package):  # pylint: disable=too-few-public-methods
+class Package(package.Package):
   """tar package."""
 
   def __init__(self, apt_version):
     super(Package, self).__init__('tar', apt_version)
 
-  def pre_build(self, _source_directory, env, _custom_bin_dir):  # pylint: disable=no-self-use
-    """Pre-build configuration for tar."""
+  def PreBuild(self, source_directory, env, custom_bin_dir):
     env['FORCE_UNSAFE_CONFIGURE'] = '1'
diff --git a/infra/base-images/base-sanitizer-libs-builder/patch_build.py b/infra/base-images/base-sanitizer-libs-builder/patch_build.py
index e594ce0c121d..cb1f4b1d74a2 100755
--- a/infra/base-images/base-sanitizer-libs-builder/patch_build.py
+++ b/infra/base-images/base-sanitizer-libs-builder/patch_build.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 ################################################################################
-"""Helper script to patch rpath in all binaries to point to instrumented libraries."""
+
 from __future__ import print_function
 import argparse
 import os
@@ -25,36 +25,27 @@
 INSTRUMENTED_LIBRARIES_DIRNAME = 'instrumented_libraries'
 MSAN_LIBS_PATH = os.getenv('MSAN_LIBS_PATH', '/msan')
 
-INTERCEPTED_LIBRARIES = {
-    '/lib/x86_64-linux-gnu/libm.so.6',
-    '/lib/x86_64-linux-gnu/libpthread.so.0',
-    '/lib/x86_64-linux-gnu/librt.so.1',
-    '/lib/x86_64-linux-gnu/libdl.so.2',
-    '/lib/x86_64-linux-gnu/libgcc_s.so.1',
-    '/lib/x86_64-linux-gnu/libc.so.6',
-}
-LDD_OUTPUT_PATTERN = re.compile(r'\s*([^\s]+)\s*=>\s*([^\s]+)')
-
-
-def is_elf(file_path):
+
+
+def IsElf(file_path):
   """Whether if the file is an elf file."""
-  with open(file_path) as elf_file:
-    return elf_file.read(4) == '\x7fELF'
+  with open(file_path) as f:
+    return f.read(4) == '\x7fELF'
 
 
-def ldd(binary_path):
+def Ldd(binary_path):
   """Run ldd on a file."""
   try:
-    output = subprocess.check_output(['ldd', binary_path],
-                                     stderr=subprocess.STDOUT)
+    output = subprocess.check_output(['ldd', binary_path], stderr=subprocess.STDOUT)
   except subprocess.CalledProcessError:
     print('Failed to call ldd on', binary_path, file=sys.stderr)
     return []
 
   libs = []
 
+  OUTPUT_PATTERN = re.compile(r'\s*([^\s]+)\s*=>\s*([^\s]+)')
   for line in output.splitlines():
-    match = LDD_OUTPUT_PATTERN.match(line)
+    match = OUTPUT_PATTERN.match(line)
     if not match:
       continue
 
@@ -63,12 +54,12 @@ def ldd(binary_path):
   return libs
 
 
-def find_lib(path):
+def FindLib(path):
   """Find instrumented version of lib."""
   candidate_path = os.path.join(MSAN_LIBS_PATH, path[1:])
   if os.path.exists(candidate_path):
     return candidate_path
-
+
   for lib_dir in os.listdir(MSAN_LIBS_PATH):
     candidate_path = os.path.join(MSAN_LIBS_PATH, lib_dir, path[1:])
     if os.path.exists(candidate_path):
@@ -77,20 +68,18 @@ def find_lib(path):
   return None
 
 
-def patch_binary(binary_path, instrumented_dir):
+def PatchBinary(binary_path, instrumented_dir):
   """Patch binary to link to instrumented libs."""
   extra_rpaths = set()
 
-  for _name, path in ldd(binary_path):
+  for name, path in Ldd(binary_path):
     if not os.path.isabs(path):
       continue
 
-    instrumented_path = find_lib(path)
+    instrumented_path = FindLib(path)
     if not instrumented_path:
-      if path not in INTERCEPTED_LIBRARIES:
-        print('WARNING: Instrumented library not found for',
-              path,
-              file=sys.stderr)
+      print('WARNING: Instrumented library not found for', path,
+            file=sys.stderr)
      continue

    target_path = os.path.join(instrumented_dir, path[1:])
@@ -116,12 +105,12 @@ def patch_binary(binary_path, instrumented_dir):
   print('Patching rpath for', binary_path, 'from', existing_rpaths, 'to',
         processed_rpaths)
-  subprocess.check_call([
-      'patchelf', '--force-rpath', '--set-rpath', processed_rpaths, binary_path
-  ])
+  subprocess.check_call(
+      ['patchelf', '--force-rpath', '--set-rpath',
+       processed_rpaths, binary_path])
 
 
-def patch_build(output_directory):
+def PatchBuild(output_directory):
   """Patch build to use msan libs."""
   instrumented_dir = os.path.join(output_directory,
                                   INSTRUMENTED_LIBRARIES_DIRNAME)
@@ -135,21 +124,19 @@ def patch_build(output_directory):
       if os.path.islink(file_path):
         continue
 
-      if not is_elf(file_path):
+      if not IsElf(file_path):
        continue

-      patch_binary(file_path, instrumented_dir)
+      PatchBinary(file_path, instrumented_dir)
 
 
 def main():
-  """Patch binaries to use instrumented libraries for all their dynamic objects."""
-  parser = argparse.ArgumentParser('patch_build.py',
-                                   description='MSan build patcher.')
+  parser = argparse.ArgumentParser('patch_build.py', description='MSan build patcher.')
   parser.add_argument('output_dir', help='Output directory.')
 
   args = parser.parse_args()
 
-  patch_build(os.path.abspath(args.output_dir))
+  PatchBuild(os.path.abspath(args.output_dir))
 
 
 if __name__ == '__main__':
diff --git a/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py b/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py
index 8d790bc76b99..0cbf1677d082 100644
--- a/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py
+++ b/infra/base-images/base-sanitizer-libs-builder/wrapper_utils.py
@@ -14,32 +14,33 @@
 # limitations under the License.
 #
 ################################################################################
-"""Functions to help with wrapper scripts."""
+
 from __future__ import print_function
+import contextlib
 import os
 import subprocess
 
 
-def dpkg_host_architecture():
+def DpkgHostArchitecture():
   """Return the host architecture."""
-  return subprocess.check_output(['dpkg-architecture',
-                                  '-qDEB_HOST_GNU_TYPE']).strip()
+  return subprocess.check_output(
+      ['dpkg-architecture', '-qDEB_HOST_GNU_TYPE']).strip()
 
 
-def install_wrapper(bin_dir, name, contents, extra_names=None):
+def InstallWrapper(bin_dir, name, contents, extra_names=None):
   """Install a custom wrapper script into |bin_dir|."""
   path = os.path.join(bin_dir, name)
-  with open(path, 'w') as wrapper_file:
-    wrapper_file.write(contents)
+  with open(path, 'w') as f:
+    f.write(contents)
 
-  os.chmod(path, 0o755)
+  os.chmod(path, 0755)
 
   if extra_names:
-    create_symlinks(path, bin_dir, extra_names)
+    CreateSymlinks(path, bin_dir, extra_names)
 
 
-def create_symlinks(original_path, bin_dir, extra_names):
+def CreateSymlinks(original_path, bin_dir, extra_names):
   """Create symlinks."""
   for extra_name in extra_names:
     extra_path = os.path.join(bin_dir, extra_name)