diff --git a/src/builder.py b/src/builder.py index 1c58892..b741226 100644 --- a/src/builder.py +++ b/src/builder.py @@ -52,27 +52,27 @@ def __str__(self): def gnu_configure(self, args=[], inplace=False): for path in util.walk_files(self._path): - filename = os.path.basename(path) + filename = path.name if filename in {'config.guess', 'config.sub'}: # Replace the config.guess and config.sub files by # up-to-date copies. The copies provided by the tarball # rarely support CloudABI. - os.unlink(path) - shutil.copy(os.path.join(config.DIR_RESOURCES, filename), path) + path.unlink() + shutil.copy(config.DIR_RESOURCES / filename, path) elif filename == 'ltmain.sh': # Patch up libtool to archive object files in sorted # order. This has been fixed in the meantime. - with open(path, 'r') as fin, open(path + '.new', 'w') as fout: + with path.open('r') as fin, (path + '.new').open('w') as fout: for l in fin.readlines(): # Add sort to the pipeline. fout.write(l.replace( '-print | $NL2SP', '-print | sort | $NL2SP')) shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + (path + '.new').rename(path) elif filename == 'configure': # Patch up configure scripts to remove constructs that are known # to fail, for example due to functions being missing. - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: + with path.open('rb') as fin, (path + '.new').open('wb') as fout: for l in fin.readlines(): # Bad C99 features test. if l.startswith(b'#define showlist(...)'): @@ -81,20 +81,20 @@ def gnu_configure(self, args=[], inplace=False): l = b'#define report(...) fprintf (stderr, __VA_ARGS__)\n' fout.write(l) shutil.copymode(path, path + '.new') - os.rename(path + '.new', path) + (path + '.new').rename(path) # Run the configure script in a separate directory. 
builddir = (self._path if inplace else self._builder._build_directory.get_new_directory()) self._builder.gnu_configure( - builddir, os.path.join(self._path, 'configure'), args) + builddir, self._path / 'configure', args) return FileHandle(self._builder, builddir) def compile(self, args=[]): output = self._path + '.o' - os.chdir(os.path.dirname(self._path)) - ext = os.path.splitext(self._path)[1] + os.chdir(self._path.parent) + ext = self._path.suffix if ext in {'.c', '.S'}: log.info('CC %s', self._path) subprocess.check_call( @@ -124,7 +124,7 @@ def host(self): return FileHandle(self._builder._host_builder, self._path) def rename(self, dst): - os.rename(self._path, dst._path) + self._path.rename(dst._path) def cmake(self, args=[]): builddir = self._builder._build_directory.get_new_directory() @@ -133,11 +133,11 @@ def cmake(self, args=[]): # Skip directory names. while True: - entries = os.listdir(source_directory) + entries = list(source_directory.iterdir()) if len(entries) != 1: break - new_directory = os.path.join(source_directory, entries[0]) - if not os.path.isdir(new_directory): + new_directory = source_directory / entries[0] + if not new_directory.is_dir(): break source_directory = new_directory @@ -152,7 +152,7 @@ def make_install(self, args=['install']): self.run(['make', 'DESTDIR=' + stagedir] + args) return FileHandle( self._builder, - os.path.join(stagedir, self._builder.get_prefix()[1:])) + stagedir.pathjoin(self._builder.get_prefix()[1:])) def ninja(self): self.run(['ninja']) @@ -162,13 +162,13 @@ def ninja_install(self): self.run(['DESTDIR=' + stagedir, 'ninja', 'install']) return FileHandle( self._builder, - os.path.join(stagedir, self._builder.get_prefix()[1:])) + stagedir.pathjoin(self._builder.get_prefix()[1:])) def open(self, mode): - return open(self._path, mode) + return self._path.open(mode) def path(self, path): - return FileHandle(self._builder, os.path.join(self._path, path)) + return FileHandle(self._builder, self._path / path) def 
remove(self): util.remove(self._path) @@ -178,7 +178,7 @@ def run(self, command): def symlink(self, contents): util.remove(self._path) - os.symlink(contents, self._path) + self._path.symlink_to(contents) def unhardcode_paths(self): self._builder.unhardcode_paths(self._path) @@ -247,8 +247,8 @@ def prefix(self): return self._builder.get_prefix() def resource(self, name): - source = os.path.join(self._resource_directory, name) - target = os.path.join(config.DIR_BUILDROOT, 'build', name) + source = self._resource_directory / name + target = config.DIR_BUILDROOT / 'build' / name util.make_parent_dir(target) util.copy_file(source, target, False) return FileHandle(self._builder, target) @@ -263,22 +263,22 @@ class BuildDirectory: def __init__(self): self._sequence_number = 0 - self._builddir = os.path.join(config.DIR_BUILDROOT, 'build') + self._builddir = config.DIR_BUILDROOT / 'build' def get_new_archive(self): - path = os.path.join(self._builddir, 'lib%d.a' % self._sequence_number) + path = self._builddir.pathjoin('lib%d.a' % self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 return path def get_new_directory(self): - path = os.path.join(self._builddir, str(self._sequence_number)) + path = self._builddir.pathjoin(str(self._sequence_number)) util.make_dir(path) self._sequence_number += 1 return path def get_new_executable(self): - path = os.path.join(self._builddir, 'bin%d' % self._sequence_number) + path = self._builddir.pathjoin('bin%d' % self._sequence_number) util.make_parent_dir(path) self._sequence_number += 1 return path @@ -291,7 +291,7 @@ def __init__(self, build_directory, install_directory): self._install_directory = install_directory self._cflags = [ - '-O2', '-I' + os.path.join(self.get_prefix(), 'include'), + '-O2', '-I' + self.get_prefix().pathjoin('include'), ] def gnu_configure(self, builddir, script, args): @@ -317,7 +317,7 @@ def get_cxx(): def get_gnu_triple(): # Run config.guess to determine the GNU triple of the system # 
we're running on. - config_guess = os.path.join(config.DIR_RESOURCES, 'config.guess') + config_guess = config.DIR_RESOURCES / 'config.guess' triple = subprocess.check_output(config_guess) return str(triple, encoding='ASCII').strip() @@ -327,12 +327,12 @@ def get_prefix(): def install(self, source, target): log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): # As these are bootstrapping tools, there is no need to # preserve any documentation and locales. - path = os.path.relpath(target_file, target) + path = target_file.relative_to(target) if (path != 'lib/charset.alias' and not path.startswith('share/doc/') and not path.startswith('share/info/') and @@ -349,8 +349,8 @@ def run(self, cwd, command): 'CXX=' + self.get_cxx(), 'CFLAGS=' + ' '.join(self._cflags), 'CXXFLAGS=' + ' '.join(self._cflags), - 'LDFLAGS=-L' + os.path.join(self.get_prefix(), 'lib'), - 'PATH=%s:%s' % (os.path.join(self.get_prefix(), 'bin'), + 'LDFLAGS=-L' + self.get_prefix().pathjoin('lib'), + 'PATH=%s:%s' % (self.get_prefix().pathjoin('bin'), os.getenv('PATH')), ] + command) @@ -368,8 +368,8 @@ def __init__(self, build_directory, install_directory, arch): self._prefix = '/' + ''.join( random.choice(string.ascii_letters) for i in range(16)) - self._bindir = os.path.join(config.DIR_BUILDROOT, 'bin') - self._localbase = os.path.join(config.DIR_BUILDROOT, self._arch) + self._bindir = config.DIR_BUILDROOT / 'bin' + self._localbase = config.DIR_BUILDROOT / self._arch self._cflags = [ '-O2', '-Werror=implicit-function-declaration', '-Werror=date-time', ] @@ -378,7 +378,7 @@ def __init__(self, build_directory, install_directory, arch): self._host_builder = HostBuilder(build_directory, None) def _tool(self, name): - return os.path.join(self._bindir, '%s-%s' % (self._arch, name)) + return self._bindir.pathjoin('%s-%s' % (self._arch, name)) 
def archive(self, object_files): objs = sorted(object_files) @@ -432,29 +432,29 @@ def get_prefix(self): return self._prefix def _unhardcode(self, source, target): - assert not os.path.islink(source) - with open(source, 'r') as f: + assert not source.is_symlink() + with source.open('r') as f: contents = f.read() contents = (contents .replace(self.get_prefix(), '%%PREFIX%%') .replace(self._localbase, '%%PREFIX%%')) - with open(target, 'w') as f: + with target.open('w') as f: f.write(contents) def unhardcode_paths(self, path): self._unhardcode(path, path + '.template') shutil.copymode(path, path + '.template') - os.unlink(path) + path.unlink() def install(self, source, target): log.info('INSTALL %s->%s', source, target) - target = os.path.join(self._install_directory, target) + target = self._install_directory / target for source_file, target_file in util.walk_files_concurrently( source, target): util.make_parent_dir(target_file) - relpath = os.path.relpath(target_file, self._install_directory) - ext = os.path.splitext(source_file)[1] - if ext in {'.la', '.pc'} and not os.path.islink(source_file): + relpath = target_file.relative_to(self._install_directory) + ext = source_file.suffix + if ext in {'.la', '.pc'} and not source_file.is_symlink(): # Remove references to the installation prefix and the # localbase directory from libtool archives and # pkg-config files. diff --git a/src/catalog.py b/src/catalog.py index 2a6118a..1af675b 100644 --- a/src/catalog.py +++ b/src/catalog.py @@ -32,7 +32,7 @@ def __init__(self, old_path, new_path): @staticmethod def _get_suggested_mode(path): - mode = os.lstat(path).st_mode + mode = path.lstat().st_mode if stat.S_ISLNK(mode): # Symbolic links. 
return 0o777 @@ -45,32 +45,32 @@ def _get_suggested_mode(path): @staticmethod def _sanitize_permissions(directory, directory_mode=0o555): - for root, dirs, files in os.walk(directory): + for root, dirs, files in util.walk(directory): util.lchmod(root, directory_mode) for filename in files: - path = os.path.join(root, filename) + path = root / filename util.lchmod(path, Catalog._get_suggested_mode(path)) @staticmethod def _run_tar(args): subprocess.check_call([ - os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') + config.DIR_BUILDROOT / 'bin/bsdtar' ] + args) def insert(self, package, version, source): - target = os.path.join( - self._new_path, self._get_filename(package, version)) + target = ( + self._new_path).pathjoin(self._get_filename(package, version)) util.make_dir(self._new_path) util.remove(target) - os.link(source, target) + source.link(target) self._packages.add((package, version)) def lookup_at_version(self, package, version): if self._old_path: - path = os.path.join( - self._old_path, + path = ( + self._old_path).pathjoin( self._get_filename(package, version)) - if os.path.exists(path): + if path.exists(): return path return None @@ -94,7 +94,7 @@ def __init__(self, old_path, new_path): # packages we're going to build. 
self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.split('_') if len(parts) == 3 and parts[2] == 'all.deb': @@ -145,41 +145,41 @@ def finish(self, private_key): def write_entry(f, package, version): f.write(self._get_control_snippet(package, version)) filename = self._get_filename(package, version) - path = os.path.join(self._new_path, filename) + path = self._new_path / filename f.write( 'Filename: %s\n' 'Size: %u\n' 'SHA256: %s\n' % ( filename, - os.path.getsize(path), + path.stat().st_size, util.sha256(path).hexdigest(), )) f.write('\n') - index = os.path.join(self._new_path, 'Packages') - with open(index, 'wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: + index = self._new_path / 'Packages' + with index.open('wt') as f, lzma.open(index + '.xz', 'wt') as f_xz: for package, version in self._packages: write_entry(f, package, version) write_entry(f_xz, package, version) # Link the index into the per-architecture directory. for arch in self._architectures: - index_arch = os.path.join( - self._new_path, + index_arch = ( + self._new_path).pathjoin( 'dists/cloudabi/cloudabi/binary-%s/Packages' % arch) util.make_parent_dir(index_arch) - os.link(index, index_arch) - os.link(index + '.xz', index_arch + '.xz') + index.link(index_arch) + (index + '.xz').link(index_arch + '.xz') checksum = util.sha256(index).hexdigest() checksum_xz = util.sha256(index + '.xz').hexdigest() - size = os.path.getsize(index) - size_xz = os.path.getsize(index + '.xz') - os.unlink(index) - os.unlink(index + '.xz') + size = index.stat().st_size + size_xz = (index + '.xz').stat().st_size + index.unlink() + (index + '.xz').unlink() # Create the InRelease file. 
- with open( - os.path.join(self._new_path, 'dists/cloudabi/InRelease'), 'w' + with ( + self._new_path / 'dists/cloudabi/InRelease').open('w' ) as f, subprocess.Popen([ 'gpg', '--local-user', private_key, '--armor', '--sign', '--clearsign', '--digest-algo', 'SHA256', @@ -209,12 +209,12 @@ def package(self, package, version): log.info('PKG %s', self._get_filename(package, version)) rootdir = config.DIR_BUILDROOT - debian_binary = os.path.join(rootdir, 'debian-binary') - controldir = os.path.join(rootdir, 'control') - datadir = os.path.join(rootdir, 'data') + debian_binary = rootdir / 'debian-binary' + controldir = rootdir / 'control' + datadir = rootdir / 'data' # Create 'debian-binary' file. - with open(debian_binary, 'w') as f: + with debian_binary.open('w') as f: f.write('2.0\n') def tar(directory): @@ -227,26 +227,26 @@ def tar(directory): # Create 'data.tar.xz' tarball that contains the files that need # to be installed by the package. - prefix = os.path.join('/usr', package.get_arch()) + prefix = os.path.join('/usr', package.get_arch()) util.make_dir(datadir) - package.extract(os.path.join(datadir, prefix[1:]), prefix) + package.extract(datadir / prefix[1:], prefix) tar(datadir) # Create 'control.tar.xz' tarball that contains the control files. 
util.make_dir(controldir) datadir_files = sorted(util.walk_files(datadir)) - datadir_size = sum(os.path.getsize(fpath) for fpath in datadir_files) - with open(os.path.join(controldir, 'control'), 'w') as f: + datadir_size = sum(fpath.stat().st_size for fpath in datadir_files) + with (controldir / 'control').open('w') as f: f.write(self._get_control_snippet(package, version, datadir_size)) - with open(os.path.join(controldir, 'md5sums'), 'w') as f: + with (controldir / 'md5sums').open('w') as f: f.writelines('%s %s\n' % (util.md5(fpath).hexdigest(), - os.path.relpath(fpath, datadir)) + fpath.relative_to(datadir)) for fpath in datadir_files) tar(controldir) - path = os.path.join(rootdir, 'output.txz') + path = rootdir / 'output.txz' subprocess.check_call([ - os.path.join(rootdir, 'bin/llvm-ar'), 'rc', path, + rootdir / 'bin/llvm-ar', 'rc', path, debian_binary, controldir + '.tar.xz', datadir + '.tar.xz', ]) return path @@ -263,7 +263,7 @@ def __init__(self, old_path, new_path): # packages we're going to build. self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.rsplit('-', 1) if len(parts) == 2 and parts[1].endswith('.txz'): @@ -293,9 +293,9 @@ def package(self, package, version): # The package needs to be installed in /usr/local/<arch> on the # FreeBSD system. 
- installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/local', arch) + prefix = os.path.join('/usr/local', arch) package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) @@ -313,7 +313,7 @@ def package(self, package, version): '"flatsize":%(flatsize)d,' '"desc":"%(name)s for %(arch)s"' % { 'arch': arch, - 'flatsize': sum(os.lstat(path).st_size for path in files), + 'flatsize': sum(path.lstat().st_size for path in files), 'freebsd_name': package.get_freebsd_name(), 'homepage': package.get_homepage(), 'maintainer': package.get_maintainer(), @@ -326,21 +326,21 @@ def package(self, package, version): '\"%s\":{"origin":"devel/%s","version":"0"}' % (dep, dep) for dep in sorted(pkg.get_freebsd_name() for pkg in deps) ) - compact_manifest = os.path.join(config.DIR_BUILDROOT, + compact_manifest = config.DIR_BUILDROOT.pathjoin( '+COMPACT_MANIFEST') - with open(compact_manifest, 'w') as f: + with compact_manifest.open('w') as f: f.write(base_manifest) f.write('}') # Create the fill manifest. if files: - manifest = os.path.join(config.DIR_BUILDROOT, '+MANIFEST') - with open(manifest, 'w') as f: + manifest = config.DIR_BUILDROOT.pathjoin('+MANIFEST') + with manifest.open('w') as f: f.write(base_manifest) f.write(',"files":{') f.write(','.join( '"%s":"1$%s"' % ( - os.path.join(prefix, os.path.relpath(path, installdir)), + os.path.join(prefix, path.relative_to(installdir)), util.sha256(path).hexdigest()) for path in files)) f.write('}}') @@ -348,9 +348,9 @@ def package(self, package, version): manifest = compact_manifest # Create the package. - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: # Leading files in tarball. 
f.write('#mtree\n') f.write( @@ -360,12 +360,12 @@ def package(self, package, version): '+MANIFEST type=file mode=0644 uname=root gname=wheel time=0 contents=%s\n' % manifest) for path in files: - fullpath = os.path.join(prefix, os.path.relpath(path, installdir)) - if os.path.islink(path): + fullpath = os.path.join(prefix, path.relative_to(installdir)) + if path.is_symlink(): # Symbolic links. f.write( '%s type=link mode=0777 uname=root gname=wheel time=0 link=%s\n' % - (fullpath, os.readlink(path))) + (fullpath, path.readlink())) else: # Regular files. f.write( @@ -389,7 +389,7 @@ def __init__(self, old_path, new_path, url): # packages we're going to build. self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.split('|', 1) if len(parts) == 2: @@ -413,21 +413,21 @@ def insert(self, package, version, source): # Create symbolic to the tarball for every supported version of # Mac OS X. filename = self._get_filename(package, version) - linksdir = os.path.join(self._new_path, 'links') + linksdir = self._new_path / 'links' util.make_dir(linksdir) for osx_version in self._OSX_VERSIONS: - link = os.path.join(linksdir, + link = linksdir.pathjoin( '%s-%s.%s.bottle.tar.gz' % ( package.get_homebrew_name(), version.get_homebrew_version(), osx_version)) util.remove(link) - os.symlink(os.path.join('..', filename), link) + link.symlink_to(os.path.join('..', filename)) # Create a formula. - formulaedir = os.path.join(self._new_path, 'formulae') + formulaedir = self._new_path / 'formulae' util.make_dir(formulaedir) - with open(os.path.join(formulaedir, - package.get_homebrew_name() + '.rb'), 'w') as f: + with (formulaedir.pathjoin( + package.get_homebrew_name() + '.rb')).open('w') as f: # Header. 
f.write("""class %(homebrew_class)s < Formula desc "%(name)s for %(arch)s" @@ -473,21 +473,21 @@ def package(self, package, version): # The package needs to be installed in /usr/local/share/<arch> # on the Mac OS X system. In the tarball, pathnames need to be # prefixed with <name>/<version>. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') - extractdir = os.path.join(installdir, package.get_homebrew_name(), + installdir = config.DIR_BUILDROOT / 'install' + extractdir = installdir.pathjoin(package.get_homebrew_name(), version.get_homebrew_version()) util.make_dir(extractdir) - package.extract(os.path.join(extractdir, 'share', package.get_arch()), - os.path.join('/usr/local/share', package.get_arch())) + package.extract(extractdir.pathjoin('share', package.get_arch()), + os.path.join('/usr/local/share', package.get_arch())) # Add a placeholder install receipt file. Homebrew depends on it # being present with at least these fields. - with open(os.path.join(extractdir, 'INSTALL_RECEIPT.json'), 'w') as f: + with (extractdir / 'INSTALL_RECEIPT.json').open('w') as f: f.write('{"used_options":[],"unused_options":[]}\n') # Archive the results. self._sanitize_permissions(installdir, directory_mode=0o755) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.gz') + output = config.DIR_BUILDROOT / 'output.tar.gz' self._run_tar([ '--options', 'gzip:!timestamp', '-czf', output, '-C', installdir, package.get_homebrew_name(), @@ -516,15 +516,15 @@ def package(self, package, version): # The package needs to be installed in /usr/pkg/<arch> on the # NetBSD system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/pkg', arch) + prefix = os.path.join('/usr/pkg', arch) package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Package contents list. 
util.make_dir(installdir) - with open(os.path.join(installdir, '+CONTENTS'), 'w') as f: + with installdir.pathjoin('+CONTENTS').open('w') as f: f.write( '@cwd /usr/pkg/%s\n' '@name %s-%s\n' % ( @@ -534,12 +534,12 @@ def package(self, package, version): for pkg in package.get_lib_depends()): f.write('@pkgdep %s-[0-9]*\n' % dep) for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') # Package description. - with open(os.path.join(installdir, '+COMMENT'), 'w') as f: + with installdir.pathjoin('+COMMENT').open('w') as f: f.write('%s for %s\n' % (package.get_name(), package.get_arch())) - with open(os.path.join(installdir, '+DESC'), 'w') as f: + with installdir.pathjoin('+DESC').open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -556,7 +556,7 @@ def package(self, package, version): # system, meaning that these packages are currently only # installable on NetBSD/x86-64. Figure out a way we can create # packages that are installable on any system that uses pkgsrc. - with open(os.path.join(installdir, '+BUILD_INFO'), 'w') as f: + with installdir.pathjoin('+BUILD_INFO').open('w') as f: f.write( 'MACHINE_ARCH=x86_64\n' 'PKGTOOLS_VERSION=00000000\n' @@ -565,12 +565,12 @@ def package(self, package, version): ) self._sanitize_permissions(installdir) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('+CONTENTS\n+COMMENT\n+DESC\n+BUILD_INFO\n') for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') self._run_tar(['-cJf', output, '-C', installdir, '-T', listing]) return output @@ -596,15 +596,15 @@ def package(self, package, version): # The package needs to be installed in /usr/local/<arch> on the # OpenBSD system. 
- installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr/local', arch) + prefix = os.path.join('/usr/local', arch) package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Package contents list. - contents = os.path.join(config.DIR_BUILDROOT, 'contents') - with open(contents, 'w') as f: + contents = config.DIR_BUILDROOT / 'contents' + with contents.open('w') as f: f.write( '@name %s-%s\n' '@cwd %s\n' % ( @@ -614,19 +614,19 @@ def package(self, package, version): written_dirs = set() for path in files: # Write entry for parent directories. - relpath = os.path.relpath(path, installdir) + relpath = path.relative_to(installdir) fullpath = '' - for component in os.path.dirname(relpath).split('/'): + for component in os.path.dirname(str(relpath)).split('/'): fullpath += component + '/' if fullpath not in written_dirs: f.write(fullpath + '\n') written_dirs.add(fullpath) - if os.path.islink(path): + if path.is_symlink(): # Write entry for symbolic link. f.write( '%s\n' - '@symlink %s\n' % (relpath, os.readlink(path))) + '@symlink %s\n' % (relpath, path.readlink())) else: # Write entry for regular file. f.write( @@ -636,11 +636,11 @@ def package(self, package, version): relpath, str(base64.b64encode( util.sha256(path).digest()), encoding='ASCII'), - os.lstat(path).st_size)) + path.lstat().st_size)) # Package description. - desc = os.path.join(config.DIR_BUILDROOT, 'desc') - with open(desc, 'w') as f: + desc = config.DIR_BUILDROOT / 'desc' + with desc.open('w') as f: f.write( '%(name)s for %(arch)s\n' '\n' @@ -655,9 +655,9 @@ def package(self, package, version): } ) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.gz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.gz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: # Leading files in tarball. 
f.write('#mtree\n') f.write( @@ -667,12 +667,12 @@ def package(self, package, version): '+DESC type=file mode=0666 uname=root gname=wheel time=0 contents=%s\n' % desc) for path in files: - relpath = os.path.relpath(path, installdir) - if os.path.islink(path): + relpath = path.relative_to(installdir) + if path.is_symlink(): # Symbolic links need to use 0o555 on OpenBSD. f.write( '%s type=link mode=0555 uname=root gname=wheel time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: # Regular files. f.write( @@ -691,7 +691,7 @@ def __init__(self, old_path, new_path): self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: parts = filename.rsplit('-', 3) if len(parts) == 4 and parts[3] == 'any.pkg.tar.xz': @@ -717,15 +717,15 @@ def package(self, package, version): package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) - package.extract(os.path.join(installdir, prefix[1:]), prefix) + prefix = '/usr' / arch + package.extract(installdir / prefix[1:], prefix) files = sorted(util.walk_files(installdir)) util.make_dir(installdir) - pkginfo = os.path.join(installdir, '.PKGINFO') - with open(pkginfo, 'w') as f: + pkginfo = installdir / '.PKGINFO' + with pkginfo.open('w') as f: f.write( 'pkgname = %(archlinux_name)s\n' 'pkgdesc = %(name)s for %(arch)s\n' @@ -734,7 +734,7 @@ def package(self, package, version): 'arch = any\n' % { 'arch': package.get_arch(), 'archlinux_name': package.get_archlinux_name(), - 'flatsize': sum(os.lstat(path).st_size for path in files), + 'flatsize': sum(path.lstat().st_size for path in files), 'name': package.get_name(), 'version': version.get_archlinux_version(), } @@ -743,27 
+743,27 @@ def package(self, package, version): for dep in sorted(pkg.get_archlinux_name() for pkg in package.get_lib_depends()): f.write('depend = %s\n' % dep) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') - listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + output = config.DIR_BUILDROOT / 'output.tar.xz' + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('.PKGINFO\n') for path in files: - f.write(os.path.relpath(path, installdir) + '\n') + f.write(path.relative_to(installdir) + '\n') - mtree = os.path.join(installdir, '.MTREE') + mtree = installdir / '.MTREE' - with open(listing, 'w') as f: + with listing.open('w') as f: f.write('#mtree\n') f.write( '.PKGINFO type=file mode=0644 uname=root gname=root time=0 contents=%s\n' % pkginfo) f.write( '.MTREE type=file mode=0644 uname=root gname=root time=0 contents=%s\n' % mtree) for path in files: - relpath = os.path.relpath(path, installdir) - if os.path.islink(path): + relpath = path.relative_to(installdir) + if path.is_symlink(): f.write( '%s type=link mode=0777 uname=root gname=root time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: f.write( '%s type=file mode=0%o uname=root gname=root time=0 contents=%s\n' % @@ -787,9 +787,9 @@ def finish(self, private_key): subprocess.check_call([ 'gpg', '--detach-sign', '--local-user', private_key, '--no-armor', '--digest-algo', 'SHA256', - os.path.join(self._new_path, package_file)]) - db_file = os.path.join(self._new_path, 'cloudabi-ports.db.tar.xz') - packages = [os.path.join(self._new_path, self._get_filename(*p)) for p in self._packages] + self._new_path / package_file]) + db_file = self._new_path / 'cloudabi-ports.db.tar.xz' + packages = [self._new_path.pathjoin(self._get_filename(*p)) for p in self._packages] # Ensure that repo-add as a valid working directory. 
os.chdir('/') subprocess.check_call(['repo-add', '-s', '-k', private_key, db_file] + packages) @@ -802,7 +802,7 @@ def __init__(self, old_path, new_path): self._existing = collections.defaultdict(FullVersion) if old_path: - for root, dirs, files in os.walk(old_path): + for root, dirs, files in util.walk(old_path): for filename in files: if filename.endswith('.tar.xz'): parts = filename[:-7].rsplit('-', 2) @@ -825,15 +825,15 @@ def package(self, package, version): package.initialize_buildroot({'libarchive'}) log.info('PKG %s', self._get_filename(package, version)) - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) - package.extract(os.path.join(installdir, prefix[1:]), prefix) + prefix = os.path.join('/usr', arch) + package.extract(installdir / prefix[1:], prefix) files = sorted(util.walk_files(installdir)) util.make_dir(installdir) - output = os.path.join(config.DIR_BUILDROOT, 'output.tar.xz') + output = config.DIR_BUILDROOT / 'output.tar.xz' self._run_tar(['-cJf', output, '-C', installdir, '.']) @@ -841,17 +841,17 @@ def package(self, package, version): def finish(self, private_key): for cygwin_arch in ('x86', 'x86_64'): - cygwin_arch_dir = os.path.join(self._new_path, cygwin_arch) + cygwin_arch_dir = self._new_path / cygwin_arch util.make_dir(cygwin_arch_dir) - setup_file = os.path.join(cygwin_arch_dir, 'setup.ini') - with open(setup_file, 'w') as f: + setup_file = cygwin_arch_dir / 'setup.ini' + with setup_file.open('w') as f: f.write('release: cygwin\n') f.write('arch: %s\n' % cygwin_arch) f.write('setup-timestamp: %d\n' % int(time.time())) for package, version in sorted(self._packages, key=lambda p:p[0].get_cygwin_name()): package_file_name = self._get_filename(package, version) - package_file = os.path.join(self._new_path, package_file_name) + package_file = self._new_path / package_file_name f.write( '\n' '@ %(cygwinname)s\n' @@ -872,7 +872,7 @@ def 
finish(self, private_key): ); f.write( 'install: %(filename)s %(size)s %(sha512)s\n' % { - 'size': os.lstat(package_file).st_size, + 'size': package_file.lstat().st_size, 'filename': package_file_name, 'sha512': util.sha512(package_file).hexdigest(), } @@ -896,20 +896,20 @@ def _get_filename(package, version): @staticmethod def _file_linkto(filename): try: - return os.readlink(filename) + return filename.readlink() except OSError: return '' @staticmethod def _file_md5(filename): - if os.path.islink(filename): + if filename.is_symlink(): return '' else: return util.md5(filename).hexdigest() @staticmethod def _file_mode(filename): - mode = os.lstat(filename).st_mode + mode = filename.lstat().st_mode if stat.S_ISLNK(mode): # Symbolic links. return 0o120777 - 65536 @@ -922,7 +922,7 @@ def _file_mode(filename): @staticmethod def _file_size(filename): - sb = os.lstat(filename) + sb = filename.lstat() if stat.S_ISREG(sb.st_mode): return sb.st_size return 0 @@ -938,27 +938,27 @@ def package(self, package, version): # The package needs to be installed in /usr/arch> on the Red Hat # system. - installdir = os.path.join(config.DIR_BUILDROOT, 'install') + installdir = config.DIR_BUILDROOT / 'install' arch = package.get_arch() - prefix = os.path.join('/usr', arch) + prefix = os.path.join('/usr', arch) package.extract(installdir, prefix) files = sorted(util.walk_files(installdir)) # Create an xz compressed cpio payload containing all files. 
- listing = os.path.join(config.DIR_BUILDROOT, 'listing') - with open(listing, 'w') as f: + listing = config.DIR_BUILDROOT / 'listing' + with listing.open('w') as f: f.write('#mtree\n') for path in files: - relpath = os.path.join(prefix, os.path.relpath(path, installdir)) - if os.path.islink(path): + relpath = os.path.join(prefix, path.relative_to(installdir)) + if path.is_symlink(): f.write( '%s type=link mode=0777 uname=root gname=root time=0 link=%s\n' % - (relpath, os.readlink(path))) + (relpath, path.readlink())) else: f.write( '%s type=file mode=0%o uname=root gname=root time=0 contents=%s\n' % (relpath, self._get_suggested_mode(path), path)) - data = os.path.join(config.DIR_BUILDROOT, 'data.cpio.xz') + data = config.DIR_BUILDROOT / 'data.cpio.xz' self._run_tar([ '-cJf', data, '--format=newc', '-C', installdir, - '@' + listing, + '@' + str(listing), ]) @@ -969,7 +969,7 @@ def package(self, package, version): name = package.get_redhat_name() lib_depends = sorted(dep.get_redhat_name() for dep in package.get_lib_depends()) - dirs = sorted({os.path.dirname(f) for f in files}) + dirs = sorted({f.parent for f in files}) header = bytes(rpm.Header({ 100: rpm.StringArray(['C']), 1000: rpm.String(name), @@ -984,7 +984,7 @@ def package(self, package, version): 1020: rpm.String(package.get_homepage()), 1021: rpm.String('linux'), 1022: rpm.String('noarch'), - 1028: rpm.Int32(os.lstat(f).st_size for f in files), + 1028: rpm.Int32(f.lstat().st_size for f in files), 1030: rpm.Int16(self._file_mode(f) for f in files), 1033: rpm.Int16(0 for f in files), 1034: rpm.Int32(0 for f in files), @@ -1002,10 +1002,10 @@ def package(self, package, version): 1097: rpm.StringArray('' for f in files), 1112: rpm.Int32(8 for dep in lib_depends), 1113: rpm.StringArray([version.get_redhat_version()]), - 1116: rpm.Int32(dirs.index(os.path.dirname(f)) for f in files), - 1117: rpm.StringArray(os.path.basename(f) for f in files), - 1118: rpm.StringArray(os.path.join(prefix, - os.path.relpath(d, installdir)) + + 1116:
rpm.Int32(dirs.index(f.parent) for f in files), + 1117: rpm.StringArray(f.name for f in files), + 1118: rpm.StringArray(os.path.join(prefix, + d.relative_to(installdir)) + '/' for d in dirs), 1124: rpm.String('cpio'), @@ -1018,13 +1018,13 @@ def package(self, package, version): checksum.update(header) util.hash_file(data, checksum) signature = bytes(rpm.Header({ - 1000: rpm.Int32([len(header) + os.stat(data).st_size]), + 1000: rpm.Int32([len(header) + data.stat().st_size]), 1004: rpm.Bin(checksum.digest()), })) # Create the RPM file. - output = os.path.join(config.DIR_BUILDROOT, 'output.rpm') - with open(output, 'wb') as f: + output = config.DIR_BUILDROOT / 'output.rpm' + with output.open('wb') as f: # The lead. f.write(b'\xed\xab\xee\xdb\x03\x00\x00\x00\x00\x00') fullname = '%s-%s' % (name, version.get_redhat_version()) @@ -1040,7 +1040,7 @@ def package(self, package, version): f.write(header) # The payload. - with open(data, 'rb') as fin: + with data.open('rb') as fin: shutil.copyfileobj(fin, f) return output @@ -1048,5 +1048,5 @@ def finish(self, private_key): subprocess.check_call(['createrepo', self._new_path]) subprocess.check_call([ 'gpg', '--detach-sign', '--local-user', private_key, - '--armor', os.path.join(self._new_path, 'repodata/repomd.xml'), + '--armor', self._new_path / 'repodata/repomd.xml', ]) diff --git a/src/catalog_set.py b/src/catalog_set.py index 3361ae6..8fa9c69 100644 --- a/src/catalog_set.py +++ b/src/catalog_set.py @@ -33,8 +33,8 @@ def _build_at_version(self, package, version, tmpdir): return False else: # A new package. Keep it.
- new = os.path.join(tmpdir, str(len(do_rebuild))) - os.rename(path, new) + new = tmpdir.joinpath(str(len(do_rebuild))) + path.rename(new) do_rebuild.append(catalog) # Round 2: Do a rebuild to ensure that the build is @@ -43,7 +43,7 @@ def _build_at_version(self, package, version, tmpdir): if do_rebuild: package.clean() for idx, catalog in enumerate(do_rebuild): - path1 = os.path.join(tmpdir, str(idx)) + path1 = tmpdir.joinpath(str(idx)) path2 = catalog.package(package, version) if not util.file_contents_equal(path1, path2): raise Exception( diff --git a/src/distfile.py b/src/distfile.py index 2710760..05d359b 100644 --- a/src/distfile.py +++ b/src/distfile.py @@ -20,7 +20,7 @@ class Distfile: def __init__(self, distdir, name, checksum, master_sites, patches, unsafe_string_sources): for patch in patches: - if not os.path.isfile(patch): + if not patch.is_file(): raise Exception('Patch %s does not exist' % patch) self._distdir = distdir @@ -28,12 +28,12 @@ def __init__(self, distdir, name, checksum, master_sites, patches, self._checksum = checksum self._patches = patches self._unsafe_string_sources = unsafe_string_sources - self._pathname = os.path.join(distdir, self._name) + self._pathname = distdir / self._name # Compute distfile URLs based on the provided list of sites. # Also add fallback URLs in case the master sites are down. self._urls = { site + os.path.basename(self._name) for site in master_sites } | { site + self._name for site in config.FALLBACK_MIRRORS } @@ -43,12 +43,12 @@ def _apply_patch(patch, target): # Automatically determine the patchlevel by taking a look at the # first filename in the patch.
patchlevel = 0 - with open(patch, 'rb') as f: + with patch.open('rb') as f: for l in f.readlines(): if l.startswith(b'--- '): filename = str(l[4:-1].split(b'\t', 1)[0], encoding='ASCII') while True: - if os.path.exists(os.path.join(target, filename)): + if (target / filename).exists(): # Correct patchlevel determined. break # Increment patchlevel once more. @@ -63,31 +63,31 @@ def _apply_patch(patch, target): break # Apply the patch. - with open(patch) as f: + with patch.open() as f: subprocess.check_call( ['patch', '-d', target, '-tsp%d' % patchlevel], stdin=f) # Delete .orig files that patch leaves behind. for path in util.walk_files(target): - if path.endswith('.orig'): - os.unlink(path) + if path.name.endswith('.orig'): + path.unlink() def _extract_unpatched(self, target): # Fetch and extract tarball. self._fetch() - tar = os.path.join(config.DIR_BUILDROOT, 'bin/bsdtar') - if not os.path.exists(tar): + tar = config.DIR_BUILDROOT / 'bin/bsdtar' + if not tar.exists(): tar = 'tar' util.make_dir(target) subprocess.check_call([tar, '-xC', target, '-f', self._pathname]) # Remove leading directory names. while True: - entries = os.listdir(target) + entries = list(target.iterdir()) if len(entries) != 1: return target - subdir = os.path.join(target, entries[0]) - if not os.path.isdir(subdir): + subdir = entries[0] + if not subdir.is_dir(): return target target = subdir @@ -105,7 +105,7 @@ def _fetch(self): log.info('FETCH %s', url) try: util.make_parent_dir(self._pathname) - with util.unsafe_fetch(url) as fin, open(self._pathname, 'wb') as fout: + with util.unsafe_fetch(url) as fin, self._pathname.open('wb') as fout: shutil.copyfileobj(fin, fout) except ConnectionResetError as e: log.warning(e) @@ -120,12 +120,12 @@ def extract(self, target): self._apply_patch(patch, target) # Add markers to sources that depend on unsafe string sources.
for filename in self._unsafe_string_sources: - path = os.path.join(target, filename) - with open(path, 'rb') as fin, open(path + '.new', 'wb') as fout: + path = target / filename + with path.open('rb') as fin, path.with_name(path.name + '.new').open('wb') as fout: fout.write(bytes('#define _CLOUDLIBC_UNSAFE_STRING_FUNCTIONS\n', encoding='ASCII')) fout.write(fin.read()) - os.rename(path + '.new', path) + path.with_name(path.name + '.new').rename(path) return target def fixup_patches(self, tmpdir): @@ -133,15 +133,15 @@ def fixup_patches(self, tmpdir): return # Extract one copy of the code to diff against. util.remove(tmpdir) - orig_dir = self._extract_unpatched(os.path.join(tmpdir, 'orig')) + orig_dir = self._extract_unpatched(tmpdir / 'orig') for path in util.walk_files(orig_dir): - if path.endswith('.orig'): - os.unlink(path) + if path.name.endswith('.orig'): + path.unlink() for patch in sorted(self._patches): log.info('FIXUP %s', patch) # Apply individual patches to the code. - patched_dir = os.path.join(tmpdir, 'patched') + patched_dir = tmpdir / 'patched' util.remove(patched_dir) patched_dir = self._extract_unpatched(patched_dir) self._apply_patch(patch, patched_dir) diff --git a/src/package.py b/src/package.py index 033c675..300b692 100644 --- a/src/package.py +++ b/src/package.py @@ -52,7 +52,7 @@ def _initialize_buildroot(self): def build(self): # Skip this package if it has been built already. - if os.path.isdir(self._install_directory): + if self._install_directory.is_dir(): return # Perform the build inside an empty buildroot. @@ -100,7 +100,7 @@ def __str__(self): def build(self): # Skip this package if it has been built already. - if not self._build_cmd or os.path.isdir(self._install_directory): + if not self._build_cmd or self._install_directory.is_dir(): return # Perform the build inside a buildroot with its dependencies @@ -129,10 +129,10 @@ def extract(self, path, expandpath): if target_file.endswith('.template'): # File is a template. Expand %%PREFIX%% tags.
target_file = target_file[:-9] - with open(source_file, 'r') as f: + with source_file.open('r') as f: contents = f.read() contents = contents.replace('%%PREFIX%%', expandpath) - with open(target_file, 'w') as f: + with target_file.open('w') as f: f.write(contents) shutil.copymode(source_file, target_file) else: @@ -198,6 +198,6 @@ def initialize_buildroot(self, host_depends, lib_depends=set()): util.remove_and_make_dir(config.DIR_BUILDROOT) for dep in host_deps: dep.extract() - prefix = os.path.join(config.DIR_BUILDROOT, self._arch) + prefix = config.DIR_BUILDROOT / self._arch for dep in lib_depends: dep.extract(prefix, prefix) diff --git a/src/repository.py b/src/repository.py index 8bdae71..351a053 100644 --- a/src/repository.py +++ b/src/repository.py @@ -43,16 +43,16 @@ def op_distfile(**kwargs): break # Automatically add patches if none are given. - dirname = os.path.dirname(path) + dirname = path.parent if 'patches' not in distfile: distfile['patches'] = (name[6:] - for name in os.listdir(dirname) + for name in (p.name for p in dirname.iterdir()) if name.startswith('patch-')) if 'unsafe_string_sources' not in distfile: distfile['unsafe_string_sources'] = frozenset() # Turn patch filenames into full paths.
- distfile['patches'] = {os.path.join(dirname, 'patch-' + patch) + distfile['patches'] = {dirname.joinpath('patch-' + patch) for patch in distfile['patches']} if name in self._distfiles: @@ -64,7 +64,7 @@ def op_host_package(**kwargs): package = kwargs - package['resource_directory'] = os.path.dirname(path) + package['resource_directory'] = path.parent name = package['name'] if name in self._deferred_host_packages: raise Exception('%s is redeclaring packages %s' % (path, name)) @@ -72,7 +72,7 @@ def op_package(**kwargs): package = kwargs - package['resource_directory'] = os.path.dirname(path) + package['resource_directory'] = path.parent name = package['name'] for arch in config.ARCHITECTURES: if (name, arch) in self._deferred_target_packages: @@ -115,7 +115,7 @@ def op_sites_sourceforge(suffix): 'sites_sourceforge': op_sites_sourceforge, } - with open(path, 'r') as f: + with path.open('r') as f: exec(f.read(), identifiers, identifiers) def get_distfiles(self): @@ -141,8 +141,8 @@ def get_host_package(name): del package['lib_depends'] package['version'] = SimpleVersion(package['version']) self._host_packages[name] = HostPackage( - install_directory=os.path.join( - self._install_directory, + install_directory=( + self._install_directory).joinpath( 'host', name), distfiles=self._distfiles, @@ -172,8 +172,8 @@ def get_target_package(name, arch): del package['lib_depends'] package['version'] = SimpleVersion(package['version']) self._target_packages[(name, arch)] = TargetPackage( - install_directory=os.path.join( - self._install_directory, arch, name), + install_directory=( + self._install_directory) / arch / name, arch=arch, distfiles=self._distfiles, host_packages=self._host_packages, @@ -192,8 +192,8 @@ def get_target_package(name, arch): packages = self._target_packages.copy() for arch in config.ARCHITECTURES: packages[('everything', arch)] = TargetPackage( -
install_directory=os.path.join(self._install_directory, arch, - 'everything'), + install_directory=self._install_directory / arch / 'everything', arch=arch, name='everything', version=SimpleVersion('1.0'),