From 897025c9c68798712f9f80c03b9f3e99a82ef087 Mon Sep 17 00:00:00 2001 From: cclauss Date: Mon, 15 Oct 2018 14:09:52 +0200 Subject: [PATCH 1/4] Some fixes for Python 3 --- configure.py | 3 +- deps/openssl/openssl/fuzz/helper.py | 3 +- test/message/testcfg.py | 37 +++-- test/pseudo-tty/testcfg.py | 35 +++-- test/testpy/__init__.py | 5 + tools/compress_json.py | 6 + tools/configure.d/nodedownload.py | 15 +- tools/genv8constants.py | 13 +- tools/getnodeversion.py | 1 + tools/gyp/PRESUBMIT.py | 4 + tools/gyp/buildbot/buildbot_run.py | 23 +-- tools/gyp/gyptest.py | 2 +- tools/gyp/pylib/gyp/MSVSNew.py | 5 + tools/gyp/pylib/gyp/MSVSSettings.py | 26 ++-- tools/gyp/pylib/gyp/MSVSVersion.py | 2 +- tools/gyp/pylib/gyp/__init__.py | 27 +++- tools/gyp/pylib/gyp/common.py | 8 +- tools/gyp/pylib/gyp/easy_xml.py | 5 + tools/gyp/pylib/gyp/flock_tool.py | 2 +- tools/gyp/pylib/gyp/generator/analyzer.py | 79 +++++----- tools/gyp/pylib/gyp/generator/cmake.py | 11 +- .../gyp/generator/dump_dependency_json.py | 3 +- tools/gyp/pylib/gyp/generator/make.py | 15 +- tools/gyp/pylib/gyp/generator/msvs.py | 15 +- tools/gyp/pylib/gyp/generator/ninja.py | 13 +- tools/gyp/pylib/gyp/generator/xcode.py | 17 ++- tools/gyp/pylib/gyp/input.py | 33 +++-- tools/gyp/pylib/gyp/mac_tool.py | 13 +- tools/gyp/pylib/gyp/simple_copy.py | 11 ++ tools/gyp/pylib/gyp/win_tool.py | 11 +- tools/gyp/pylib/gyp/xcode_emulation.py | 15 +- tools/gyp/pylib/gyp/xcode_ninja.py | 2 +- tools/gyp/pylib/gyp/xcodeproj_file.py | 19 ++- tools/gyp/tools/graphviz.py | 29 ++-- tools/gyp/tools/pretty_gyp.py | 9 +- tools/gyp/tools/pretty_sln.py | 47 +++--- tools/gyp/tools/pretty_vcproj.py | 21 ++- tools/gyp_node.py | 3 +- tools/icu/icutrim.py | 87 ++++++----- tools/icu/shrink-icu-src.py | 37 ++--- .../CheckProtocolCompatibility.py | 5 +- tools/inspector_protocol/CodeGenerator.py | 7 + .../ConvertProtocolToJSON.py | 3 +- tools/install.py | 13 +- tools/jinja2/_compat.py | 2 + tools/jinja2/bccache.py | 2 + tools/js2c.py | 5 + tools/markupsafe/_compat.py | 2 + tools/run-valgrind.py | 5 +- tools/specialize_node_d.py | 7 +- tools/test.py | 137 ++++++++++-------- 51 files changed, 542 insertions(+), 358 deletions(-) diff --git a/configure.py b/configure.py index ae44051edf4ff6..34b7c895d86d98 100755 --- a/configure.py +++ b/configure.py @@ -1,3 +1,4 @@ +from __future__ import print_function import json import sys import errno @@ -588,7 +589,7 @@ def print_verbose(x): if not options.verbose: return if type(x) is str: - print x + print(x) else: pprint.pprint(x, indent=2) diff --git a/deps/openssl/openssl/fuzz/helper.py b/deps/openssl/openssl/fuzz/helper.py index f5f9d77daaf259..40a1dc0202704c 100755 --- a/deps/openssl/openssl/fuzz/helper.py +++ b/deps/openssl/openssl/fuzz/helper.py @@ -11,6 +11,7 @@ fuzzer.py """ +from __future__ import print_function import os import subprocess @@ -45,7 +46,7 @@ def main(): cmd = ([os.path.abspath(os.path.join(THIS_DIR, FUZZER))] + sys.argv[2:] + ["-artifact_prefix=" + corpora[1] + "/"] + corpora) - print " ".join(cmd) + print(" ".join(cmd)) subprocess.call(cmd) if __name__ == "__main__": diff --git a/test/message/testcfg.py b/test/message/testcfg.py index 819dfa12c5b631..b2e36c8b887023 100644 --- a/test/message/testcfg.py +++ b/test/message/testcfg.py @@ -25,13 +25,26 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+from __future__ import print_function + import test import os from os.path import join, exists, basename, isdir import re +try: + reduce # Python 2 +except NameError: # Python 3 + from functools import reduce + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)") + class MessageTestCase(test.TestCase): def __init__(self, path, file, expected, arch, mode, context, config): @@ -48,7 +61,7 @@ def IgnoreLine(self, str): else: return str.startswith('==') or str.startswith('**') def IsFailureOutput(self, output): - f = file(self.expected) + f = open(self.expected) # Skip initial '#' comment and spaces #for line in f: # if (not line.startswith('#')) and (not line.strip()): @@ -67,22 +80,22 @@ def IsFailureOutput(self, output): raw_lines = (output.stdout + output.stderr).split('\n') outlines = [ s for s in raw_lines if not self.IgnoreLine(s) ] if len(outlines) != len(patterns): - print "length differs." - print "expect=%d" % len(patterns) - print "actual=%d" % len(outlines) - print "patterns:" + print("length differs.") + print("expect=%d" % len(patterns)) + print("actual=%d" % len(outlines)) + print("patterns:") for i in xrange(len(patterns)): - print "pattern = %s" % patterns[i] - print "outlines:" + print("pattern = %s" % patterns[i]) + print("outlines:") for i in xrange(len(outlines)): - print "outline = %s" % outlines[i] + print("outline = %s" % outlines[i]) return True for i in xrange(len(patterns)): if not re.match(patterns[i], outlines[i]): - print "match failed" - print "line=%d" % i - print "expect=%s" % patterns[i] - print "actual=%s" % outlines[i] + print("match failed") + print("line=%d" % i) + print("expect=%s" % patterns[i]) + print("actual=%s" % outlines[i]) return True return False diff --git a/test/pseudo-tty/testcfg.py b/test/pseudo-tty/testcfg.py index 920789844583de..0108ea16d852a0 100644 --- a/test/pseudo-tty/testcfg.py +++ b/test/pseudo-tty/testcfg.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright 2008 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are @@ -31,6 +32,16 @@ import re import utils +try: + reduce # Python 2 +except NameError: # Python 3 + from functools import reduce + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)") class TTYTestCase(test.TestCase): @@ -50,7 +61,7 @@ def IgnoreLine(self, str): else: return str.startswith('==') or str.startswith('**') def IsFailureOutput(self, output): - f = file(self.expected) + f = open(self.expected) # Convert output lines to regexps that we can match env = { 'basename': basename(self.file) } patterns = [ ] @@ -65,22 +76,22 @@ def IsFailureOutput(self, output): raw_lines = (output.stdout + output.stderr).split('\n') outlines = [ s.strip() for s in raw_lines if not self.IgnoreLine(s) ] if len(outlines) != len(patterns): - print "length differs." 
- print "expect=%d" % len(patterns) - print "actual=%d" % len(outlines) - print "patterns:" + print("length differs.") + print("expect=%d" % len(patterns)) + print("actual=%d" % len(outlines)) + print("patterns:") for i in xrange(len(patterns)): - print "pattern = %s" % patterns[i] - print "outlines:" + print("pattern = %s" % patterns[i]) + print("outlines:") for i in xrange(len(outlines)): - print "outline = %s" % outlines[i] + print("outline = %s" % outlines[i]) return True for i in xrange(len(patterns)): if not re.match(patterns[i], outlines[i]): - print "match failed" - print "line=%d" % i - print "expect=%s" % patterns[i] - print "actual=%s" % outlines[i] + print("match failed") + print("line=%d" % i) + print("expect=%s" % patterns[i]) + print("actual=%s" % outlines[i]) return True return False diff --git a/test/testpy/__init__.py b/test/testpy/__init__.py index 8b5b2f6b48f09f..be99e3bf2d1db8 100644 --- a/test/testpy/__init__.py +++ b/test/testpy/__init__.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright 2008 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are @@ -31,6 +32,10 @@ import re import ast +try: + reduce # Python 2 +except NameError: # Python 3 + from functools import reduce FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)") FILES_PATTERN = re.compile(r"//\s+Files:(.*)") diff --git a/tools/compress_json.py b/tools/compress_json.py index 34dbb878c48a27..9af95302721e8f 100644 --- a/tools/compress_json.py +++ b/tools/compress_json.py @@ -5,6 +5,12 @@ import sys import zlib +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + + if __name__ == '__main__': fp = open(sys.argv[1]) obj = json.load(fp) diff --git a/tools/configure.d/nodedownload.py b/tools/configure.d/nodedownload.py index 3f4bc090f71bdd..b8de2d56e0dfb9 100644 --- a/tools/configure.d/nodedownload.py +++ b/tools/configure.d/nodedownload.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # Moved some utilities here from ../../configure +from __future__ import print_function import urllib import hashlib import sys @@ -36,10 +37,10 @@ def retrievefile(url, targetfile): sys.stdout.write(' <%s>\nConnecting...\r' % url) sys.stdout.flush() ConfigOpener().retrieve(url, targetfile, reporthook=reporthook) - print '' # clear the line + print('') # clear the line return targetfile except: - print ' ** Error occurred while downloading\n <%s>' % url + print(' ** Error occurred while downloading\n <%s>' % url) raise def md5sum(targetfile): @@ -56,12 +57,12 @@ def unpack(packedfile, parent_path): """Unpacks packedfile into parent_path. Assumes .zip. 
Returns parent_path""" if zipfile.is_zipfile(packedfile): with contextlib.closing(zipfile.ZipFile(packedfile, 'r')) as icuzip: - print ' Extracting zipfile: %s' % packedfile + print(' Extracting zipfile: %s' % packedfile) icuzip.extractall(parent_path) return parent_path elif tarfile.is_tarfile(packedfile): with contextlib.closing(tarfile.TarFile.open(packedfile, 'r')) as icuzip: - print ' Extracting tarfile: %s' % packedfile + print(' Extracting tarfile: %s' % packedfile) icuzip.extractall(parent_path) return parent_path else: @@ -112,7 +113,7 @@ def parse(opt): theRet[anOpt] = True else: # future proof: ignore unknown types - print 'Warning: ignoring unknown --download= type "%s"' % anOpt + print('Warning: ignoring unknown --download= type "%s"' % anOpt) # all done return theRet @@ -122,6 +123,6 @@ def candownload(auto_downloads, package): if auto_downloads[package]: return True else: - print """Warning: Not downloading package "%s". You could pass "--download=all" - (Windows: "download-all") to try auto-downloading it.""" % package + print("""Warning: Not downloading package "%s". You could pass "--download=all" + (Windows: "download-all") to try auto-downloading it.""" % package) return False diff --git a/tools/genv8constants.py b/tools/genv8constants.py index 2b6ed8499253ec..e7f2183cab46a0 100755 --- a/tools/genv8constants.py +++ b/tools/genv8constants.py @@ -7,28 +7,29 @@ # ustack helper. # +from __future__ import print_function import re import subprocess import sys import errno if len(sys.argv) != 3: - print "usage: objsym.py outfile libv8_base.a" + print("usage: objsym.py outfile libv8_base.a") sys.exit(2); -outfile = file(sys.argv[1], 'w'); +outfile = open(sys.argv[1], 'w'); try: pipe = subprocess.Popen([ 'objdump', '-z', '-D', sys.argv[2] ], bufsize=-1, stdout=subprocess.PIPE).stdout; -except OSError, e: +except OSError as e: if e.errno == errno.ENOENT: - print ''' + print(''' Node.js compile error: could not find objdump Check that GNU binutils are installed and included in PATH - ''' + ''') else: - print 'problem running objdump: ', e.strerror + print('problem running objdump: ', e.strerror) sys.exit() diff --git a/tools/getnodeversion.py b/tools/getnodeversion.py index 59f8aabe49eceb..c9f82160c0f386 100644 --- a/tools/getnodeversion.py +++ b/tools/getnodeversion.py @@ -1,3 +1,4 @@ +from __future__ import print_function import os import re diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py index 4bc1b8ca26afda..ced83e809a1c5d 100644 --- a/tools/gyp/PRESUBMIT.py +++ b/tools/gyp/PRESUBMIT.py @@ -9,6 +9,10 @@ for more details about the presubmit API built into gcl. """ +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 PYLINT_BLACKLIST = [ # TODO: fix me. diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py index 9a2b71f1b355cf..cdd347d0bcc95a 100755 --- a/tools/gyp/buildbot/buildbot_run.py +++ b/tools/gyp/buildbot/buildbot_run.py @@ -4,6 +4,7 @@ # found in the LICENSE file. 
"""Argument-less script to select what to run on the buildbots.""" +from __future__ import print_function import os import shutil @@ -24,14 +25,14 @@ def CallSubProcess(*args, **kwargs): with open(os.devnull) as devnull_fd: retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs) if retcode != 0: - print '@@@STEP_EXCEPTION@@@' + print('@@@STEP_EXCEPTION@@@') sys.exit(1) def PrepareCmake(): """Build CMake 2.8.8 since the version in Precise is 2.8.7.""" if os.environ['BUILDBOT_CLOBBER'] == '1': - print '@@@BUILD_STEP Clobber CMake checkout@@@' + print('@@@BUILD_STEP Clobber CMake checkout@@@') shutil.rmtree(CMAKE_DIR) # We always build CMake 2.8.8, so no need to do anything @@ -39,10 +40,10 @@ def PrepareCmake(): if os.path.isdir(CMAKE_DIR): return - print '@@@BUILD_STEP Initialize CMake checkout@@@' + print('@@@BUILD_STEP Initialize CMake checkout@@@') os.mkdir(CMAKE_DIR) - print '@@@BUILD_STEP Sync CMake@@@' + print('@@@BUILD_STEP Sync CMake@@@') CallSubProcess( ['git', 'clone', '--depth', '1', @@ -53,7 +54,7 @@ def PrepareCmake(): CMAKE_DIR], cwd=CMAKE_DIR) - print '@@@BUILD_STEP Build CMake@@@' + print('@@@BUILD_STEP Build CMake@@@') CallSubProcess( ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR], cwd=CMAKE_DIR) @@ -74,7 +75,7 @@ def GypTestFormat(title, format=None, msvs_version=None, tests=[]): if not format: format = title - print '@@@BUILD_STEP ' + title + '@@@' + print('@@@BUILD_STEP ' + title + '@@@') sys.stdout.flush() env = os.environ.copy() if msvs_version: @@ -89,17 +90,17 @@ def GypTestFormat(title, format=None, msvs_version=None, tests=[]): retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True) if retcode: # Emit failure tag, and keep going. - print '@@@STEP_FAILURE@@@' + print('@@@STEP_FAILURE@@@') return 1 return 0 def GypBuild(): # Dump out/ directory. - print '@@@BUILD_STEP cleanup@@@' - print 'Removing %s...' % OUT_DIR + print('@@@BUILD_STEP cleanup@@@') + print('Removing %s...' % OUT_DIR) shutil.rmtree(OUT_DIR, ignore_errors=True) - print 'Done.' + print('Done.') retcode = 0 if sys.platform.startswith('linux'): @@ -128,7 +129,7 @@ def GypBuild(): # after the build proper that could be used for cumulative failures), # use that instead of this. This isolates the final return value so # that it isn't misattributed to the last stage. - print '@@@BUILD_STEP failures@@@' + print('@@@BUILD_STEP failures@@@') sys.exit(retcode) diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py index 9930e78c7b0f1c..1a9ffca7a134ae 100755 --- a/tools/gyp/gyptest.py +++ b/tools/gyp/gyptest.py @@ -58,7 +58,7 @@ def main(argv=None): os.chdir(args.chdir) if args.path: - extra_path = [os.path.abspath(p) for p in opts.path] + extra_path = [os.path.abspath(p) for p in args.path] extra_path = os.pathsep.join(extra_path) os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH'] diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/tools/gyp/pylib/gyp/MSVSNew.py index 593f0e5b0b2e88..a5ed3777e5be3b 100644 --- a/tools/gyp/pylib/gyp/MSVSNew.py +++ b/tools/gyp/pylib/gyp/MSVSNew.py @@ -20,6 +20,11 @@ import md5 _new_md5 = md5.new +try: + cmp # Python 2 +except NameError: + def cmp(x, y): # Python 3 + return (x > y) - (x < y) # Initialize random number generator random.seed() diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py index 99c53a857008cc..9e02fd151830a4 100644 --- a/tools/gyp/pylib/gyp/MSVSSettings.py +++ b/tools/gyp/pylib/gyp/MSVSSettings.py @@ -13,10 +13,16 @@ The MSBuild schemas were also considered. 
They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ +from __future__ import print_function import sys import re +try: + basestring # Python 2 +except NameError: + basestring = (str, ) # Python 3 + # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} @@ -400,7 +406,7 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): if unrecognized: # We don't know this setting. Give a warning. - print >> stderr, error_msg + print(error_msg, file=stderr) def FixVCMacroSlashes(s): @@ -461,9 +467,9 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): # Invoke the translation function. try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError, e: - print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)) + except ValueError as e: + print(('Warning: while converting %s/%s to MSBuild, ' + '%s' % (msvs_tool_name, msvs_setting, e)), file=stderr) else: _ValidateExclusionSetting(msvs_setting, msvs_tool, @@ -472,8 +478,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): (msvs_tool_name, msvs_setting)), stderr) else: - print >> stderr, ('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name) + print(('Warning: unrecognized tool %s while converting to ' + 'MSBuild.' % msvs_tool_name), file=stderr) return msbuild_settings @@ -517,9 +523,9 @@ def _ValidateSettings(validators, settings, stderr): if setting in tool_validators: try: tool_validators[setting](value) - except ValueError, e: - print >> stderr, ('Warning: for %s/%s, %s' % - (tool_name, setting, e)) + except ValueError as e: + print(('Warning: for %s/%s, %s' % + (tool_name, setting, e)), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -528,7 +534,7 @@ def _ValidateSettings(validators, settings, stderr): stderr) else: - print >> stderr, ('Warning: unrecognized tool %s' % tool_name) + print(('Warning: unrecognized tool %s' % tool_name), file=stderr) # MSVS and MBuild names of the tools. diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py index 44b958d5b3d22e..d2a1165e20c5bf 100644 --- a/tools/gyp/pylib/gyp/MSVSVersion.py +++ b/tools/gyp/pylib/gyp/MSVSVersion.py @@ -189,7 +189,7 @@ def _RegistryQuery(key, value=None): text = None try: text = _RegistryQueryBase('Sysnative', key, value) - except OSError, e: + except OSError as e: if e.errno == errno.ENOENT: text = _RegistryQueryBase('System32', key, value) else: diff --git a/tools/gyp/pylib/gyp/__init__.py b/tools/gyp/pylib/gyp/__init__.py index 668f38b60d0093..572c2a8c547d26 100755 --- a/tools/gyp/pylib/gyp/__init__.py +++ b/tools/gyp/pylib/gyp/__init__.py @@ -4,6 +4,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+from __future__ import print_function import copy import gyp.input import optparse @@ -14,6 +15,16 @@ import traceback from gyp.common import GypError +try: + basestring # Python 2 +except NameError: + basestring = (str, ) # Python 3 + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + # Default debug modes for GYP debug = {} @@ -34,8 +45,8 @@ def DebugOutput(mode, message, *args): pass if args: message %= args - print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message) + print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), + ctx[1], ctx[2], message)) def FindBuildFiles(): extension = '.gyp' @@ -226,12 +237,12 @@ def Noop(value): (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' + print(('Warning: environment regeneration unimplemented ' 'for %s flag %r env_name %r' % (action, opt, - env_name)) + env_name)), file=sys.stderr) else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) + print(('Warning: regeneration unimplemented for action %r ' + 'flag %r' % (action, opt)), file=sys.stderr) return flags @@ -475,7 +486,7 @@ def gyp_main(args): if home_dot_gyp != None: default_include = os.path.join(home_dot_gyp, 'include.gypi') if os.path.exists(default_include): - print 'Using overrides found in ' + default_include + print('Using overrides found in ' + default_include) includes.append(default_include) # Command-line --include files come after the default include. @@ -536,7 +547,7 @@ def gyp_main(args): def main(args): try: return gyp_main(args) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return 1 diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index a1e1db5f1239ff..0f5e8a2961a893 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -363,7 +363,7 @@ def close(self): same = False try: same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -382,9 +382,9 @@ def close(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) + os.chmod(self.tmp_path, 0o666 & ~umask) if sys.platform == 'win32' and os.path.exists(filename): # NOTE: on windows (but not cygwin) rename will not replace an # existing file, so it must be preceded with a remove. Sadly there @@ -467,7 +467,7 @@ def CopyTool(flavor, out_path, generator_flags={}): ''.join([source[0], header] + source[1:])) # Make file executable. - os.chmod(tool_path, 0755) + os.chmod(tool_path, 0o755) # From Alex Martelli, diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/tools/gyp/pylib/gyp/easy_xml.py index 2522efb244d0a3..9bcdddcb9bc1f7 100644 --- a/tools/gyp/pylib/gyp/easy_xml.py +++ b/tools/gyp/pylib/gyp/easy_xml.py @@ -6,6 +6,11 @@ import os import locale +try: + reduce # Python 2 +except NameError: # Python 3 + from functools import reduce + def XmlToString(content, encoding='utf-8', pretty=False): """ Writes the XML content to disk, touching the file only if it has changed. 
diff --git a/tools/gyp/pylib/gyp/flock_tool.py b/tools/gyp/pylib/gyp/flock_tool.py index b38d8660f72853..81fb79d136a0a5 100755 --- a/tools/gyp/pylib/gyp/flock_tool.py +++ b/tools/gyp/pylib/gyp/flock_tool.py @@ -39,7 +39,7 @@ def ExecFlock(self, lockfile, *cmd_list): # where fcntl.flock(fd, LOCK_EX) always fails # with EBADF, that's why we use this F_SETLK # hack instead. - fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) + fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666) if sys.platform.startswith('aix'): # Python on AIX is compiled with LARGEFILE support, which changes the # struct size. diff --git a/tools/gyp/pylib/gyp/generator/analyzer.py b/tools/gyp/pylib/gyp/generator/analyzer.py index 921c1a6b714328..ce7f83c932b70c 100644 --- a/tools/gyp/pylib/gyp/generator/analyzer.py +++ b/tools/gyp/pylib/gyp/generator/analyzer.py @@ -61,6 +61,7 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp then the "all" target includes "b1" and "b2". """ +from __future__ import print_function import gyp.common import gyp.ninja_syntax as ninja_syntax @@ -155,7 +156,7 @@ def _AddSources(sources, base_path, base_path_components, result): continue result.append(base_path + source) if debug: - print 'AddSource', org_source, result[len(result) - 1] + print('AddSource', org_source, result[len(result) - 1]) def _ExtractSourcesFromAction(action, base_path, base_path_components, @@ -185,7 +186,7 @@ def _ExtractSources(target, target_dict, toplevel_dir): base_path += '/' if debug: - print 'ExtractSources', target, base_path + print('ExtractSources', target, base_path) results = [] if 'sources' in target_dict: @@ -278,7 +279,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): the root of the source tree.""" if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: if debug: - print 'gyp file modified', build_file + print('gyp file modified', build_file) return True # First element of included_files is the file itself. @@ -291,8 +292,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): _ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) if _ToLocalPath(toplevel_dir, rel_include_file) in files: if debug: - print 'included gyp file modified, gyp_file=', build_file, \ - 'included file=', rel_include_file + print('included gyp file modified, gyp_file=', build_file, \ + 'included file=', rel_include_file) return True return False @@ -373,7 +374,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, # If a build file (or any of its included files) is modified we assume all # targets in the file are modified. 
if build_file_in_files[build_file]: - print 'matching target from modified build file', target_name + print('matching target from modified build file', target_name) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) else: @@ -381,7 +382,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, toplevel_dir) for source in sources: if _ToGypPath(os.path.normpath(source)) in files: - print 'target', target_name, 'matches', source + print('target', target_name, 'matches', source) target.match_status = MATCH_STATUS_MATCHES matching_targets.append(target) break @@ -433,7 +434,7 @@ def _DoesTargetDependOnMatchingTargets(target): for dep in target.deps: if _DoesTargetDependOnMatchingTargets(dep): target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY - print '\t', target.name, 'matches by dep', dep.name + print('\t', target.name, 'matches by dep', dep.name) return True target.match_status = MATCH_STATUS_DOESNT_MATCH return False @@ -445,7 +446,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets): supplied as input to analyzer. possible_targets: targets to search from.""" found = [] - print 'Targets that matched by dependency:' + print('Targets that matched by dependency:') for target in possible_targets: if _DoesTargetDependOnMatchingTargets(target): found.append(target) @@ -484,12 +485,12 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result): (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and not target.is_or_has_linked_ancestor)): - print '\t\tadding to compile targets', target.name, 'executable', \ + print('\t\tadding to compile targets', target.name, 'executable', \ target.is_executable, 'added_to_compile_targets', \ target.added_to_compile_targets, 'add_if_no_ancestor', \ add_if_no_ancestor, 'requires_build', target.requires_build, \ 'is_static_library', target.is_static_library, \ - 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor + 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor) result.add(target) target.added_to_compile_targets = True @@ -500,7 +501,7 @@ def _GetCompileTargets(matching_targets, supplied_targets): supplied_targets: set of targets supplied to analyzer to search from.""" result = set() for target in matching_targets: - print 'finding compile targets for match', target.name + print('finding compile targets for match', target.name) _AddCompileTargets(target, supplied_targets, True, result) return result @@ -508,46 +509,46 @@ def _GetCompileTargets(matching_targets, supplied_targets): def _WriteOutput(params, **values): """Writes the output, either to stdout or a file is specified.""" if 'error' in values: - print 'Error:', values['error'] + print('Error:', values['error']) if 'status' in values: - print values['status'] + print(values['status']) if 'targets' in values: values['targets'].sort() - print 'Supplied targets that depend on changed files:' + print('Supplied targets that depend on changed files:') for target in values['targets']: - print '\t', target + print('\t', target) if 'invalid_targets' in values: values['invalid_targets'].sort() - print 'The following targets were not found:' + print('The following targets were not found:') for target in values['invalid_targets']: - print '\t', target + print('\t', target) if 'build_targets' in values: values['build_targets'].sort() - print 'Targets that require a build:' + print('Targets that require a build:') for target in values['build_targets']: - print '\t', target + 
print('\t', target) if 'compile_targets' in values: values['compile_targets'].sort() - print 'Targets that need to be built:' + print('Targets that need to be built:') for target in values['compile_targets']: - print '\t', target + print('\t', target) if 'test_targets' in values: values['test_targets'].sort() - print 'Test targets:' + print('Test targets:') for target in values['test_targets']: - print '\t', target + print('\t', target) output_path = params.get('generator_flags', {}).get( 'analyzer_output_path', None) if not output_path: - print json.dumps(values) + print(json.dumps(values)) return try: f = open(output_path, 'w') f.write(json.dumps(values) + '\n') f.close() except IOError as e: - print 'Error writing to output file', output_path, str(e) + print('Error writing to output file', output_path, str(e)) def _WasGypIncludeFileModified(params, files): @@ -556,7 +557,7 @@ def _WasGypIncludeFileModified(params, files): if params['options'].includes: for include in params['options'].includes: if _ToGypPath(os.path.normpath(include)) in files: - print 'Include file modified, assuming all changed', include + print('Include file modified, assuming all changed', include) return True return False @@ -638,13 +639,13 @@ def find_matching_test_target_names(self): set(self._root_targets))] else: test_targets = [x for x in test_targets_no_all] - print 'supplied test_targets' + print('supplied test_targets') for target_name in self._test_target_names: - print '\t', target_name - print 'found test_targets' + print('\t', target_name) + print('found test_targets') for target in test_targets: - print '\t', target.name - print 'searching for matching test targets' + print('\t', target.name) + print('searching for matching test targets') matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) matching_test_targets_contains_all = (test_target_names_contains_all and set(matching_test_targets) & @@ -654,14 +655,14 @@ def find_matching_test_target_names(self): # 'all' is subsequentely added to the matching names below. 
matching_test_targets = [x for x in (set(matching_test_targets) & set(test_targets_no_all))] - print 'matched test_targets' + print('matched test_targets') for target in matching_test_targets: - print '\t', target.name + print('\t', target.name) matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] for target in matching_test_targets] if matching_test_targets_contains_all: matching_target_names.append('all') - print '\tall' + print('\tall') return matching_target_names def find_matching_compile_target_names(self): @@ -677,10 +678,10 @@ def find_matching_compile_target_names(self): if 'all' in self._supplied_target_names(): supplied_targets = [x for x in (set(supplied_targets) | set(self._root_targets))] - print 'Supplied test_targets & compile_targets' + print('Supplied test_targets & compile_targets') for target in supplied_targets: - print '\t', target.name - print 'Finding compile targets' + print('\t', target.name) + print('Finding compile targets') compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets) return [gyp.common.ParseQualifiedTarget(target.name)[1] @@ -699,7 +700,7 @@ def GenerateOutput(target_list, target_dicts, data, params): toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir)) if debug: - print 'toplevel_dir', toplevel_dir + print('toplevel_dir', toplevel_dir) if _WasGypIncludeFileModified(params, config.files): result_dict = { 'status': all_changed_string, diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py index a2b96291aa526a..ab5170cdc55619 100644 --- a/tools/gyp/pylib/gyp/generator/cmake.py +++ b/tools/gyp/pylib/gyp/generator/cmake.py @@ -27,6 +27,7 @@ not be able to find the header file directories described in the generated CMakeLists.txt file. 
""" +from __future__ import print_function import multiprocessing import os @@ -868,8 +869,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX'] elif target_type != 'executable': - print ('ERROR: What output file should be generated?', - 'type', target_type, 'target', target_name) + print(('ERROR: What output file should be generated?', + 'type', target_type, 'target', target_name)) product_prefix = spec.get('product_prefix', default_product_prefix) product_name = spec.get('product_name', default_product_name) @@ -1207,11 +1208,11 @@ def PerformBuild(data, configurations, params): output_dir, config_name)) arguments = ['cmake', '-G', 'Ninja'] - print 'Generating [%s]: %s' % (config_name, arguments) + print('Generating [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments, cwd=build_dir) arguments = ['ninja', '-C', build_dir] - print 'Building [%s]: %s' % (config_name, arguments) + print('Building [%s]: %s' % (config_name, arguments)) subprocess.check_call(arguments) @@ -1239,7 +1240,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append((target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py index 160eafe2efeca0..8e4f3168f3e7ef 100644 --- a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py +++ b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -96,4 +97,4 @@ def GenerateOutput(target_list, target_dicts, data, params): f = open(filename, 'w') json.dump(edges, f) f.close() - print 'Wrote json to %s.' % filename + print('Wrote json to %s.' % filename) diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index e98d93ab233ff8..8f5457b10ba9da 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -23,7 +24,6 @@ import os import re -import sys import subprocess import gyp import gyp.common @@ -33,6 +33,11 @@ import hashlib +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', @@ -1359,8 +1364,8 @@ def ComputeOutputBasename(self, spec): elif self.type == 'none': target = '%s.stamp' % target elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) + print(("ERROR: What output file should be generated?", + "type", self.type, "target", target)) target_prefix = spec.get('product_prefix', target_prefix) target = spec.get('product_name', target) @@ -1616,7 +1621,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, postbuilds=postbuilds) else: - print "WARNING: no output for", self.type, target + print("WARNING: no output for", self.type, self.target) # Add an alias for each target (if there are any outputs). # Installable target aliases are created below. @@ -1968,7 +1973,7 @@ def PerformBuild(data, configurations, params): if options.toplevel_dir and options.toplevel_dir != '.': arguments += '-C', options.toplevel_dir arguments.append('BUILDTYPE=' + config) - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 8fe9e5af23dd38..e6090214183263 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -315,10 +316,10 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version): if names: return names[0] else: - print >> sys.stdout, ( + print(( 'Warning: No include files found for ' 'detected Windows SDK version %s' % (version) - ) + ), file=sys.stdout) def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, @@ -775,8 +776,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. 
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) + print(('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s), file=sys.stderr) return s @@ -1977,7 +1978,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = [devenv, sln_path, '/Build', config] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) rtn = subprocess.check_call(arguments) @@ -2072,7 +2073,7 @@ def GenerateOutput(target_list, target_dicts, data, params): if generator_flags.get('msvs_error_on_missing_sources', False): raise GypError(error_message) else: - print >> sys.stdout, "Warning: " + error_message + print("Warning: " + error_message, file=sys.stdout) def _GenerateMSBuildFiltersFile(filters_path, source_files, @@ -2831,7 +2832,7 @@ def _ConvertMSVSBuildAttributes(spec, config, build_file): elif a == 'ConfigurationType': msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) else: - print 'Warning: Do not know how to convert MSVS attribute ' + a + print('Warning: Do not know how to convert MSVS attribute ' + a) return msbuild_attributes diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index 6140df9513096c..ac3279f7b01133 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -455,8 +456,8 @@ def WriteSpec(self, spec, config_name, generator_flags): try: sources = extra_sources + spec.get('sources', []) except TypeError: - print 'extra_sources: ', str(extra_sources) - print 'spec.get("sources"): ', str(spec.get('sources')) + print('extra_sources: ', str(extra_sources)) + print('spec.get("sources"): ', str(spec.get('sources'))) raise if sources: if self.flavor == 'mac' and len(self.archs) > 1: @@ -485,8 +486,8 @@ def WriteSpec(self, spec, config_name, generator_flags): if self.flavor != 'mac' or len(self.archs) == 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: - print "Warning: Actions/rules writing object files don't work with " \ - "multiarch targets, dropping. (target %s)" % spec['target_name'] + print("Warning: Actions/rules writing object files don't work with " \ + "multiarch targets, dropping. 
(target %s)" % spec['target_name']) elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) @@ -2442,7 +2443,7 @@ def PerformBuild(data, configurations, params): for config in configurations: builddir = os.path.join(options.toplevel_dir, 'out', config) arguments = ['ninja', '-C', builddir] - print 'Building [%s]: %s' % (config, arguments) + print('Building [%s]: %s' % (config, arguments)) subprocess.check_call(arguments) @@ -2479,7 +2480,7 @@ def GenerateOutput(target_list, target_dicts, data, params): arglists.append( (target_list, target_dicts, data, params, config_name)) pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: pool.terminate() raise e else: diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py index db99d6ab81ed5c..b217610f147583 100644 --- a/tools/gyp/pylib/gyp/generator/xcode.py +++ b/tools/gyp/pylib/gyp/generator/xcode.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,6 +16,10 @@ import subprocess import tempfile +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 # Project files generated by this module will use _intermediate_var as a # custom Xcode setting whose value is a DerivedSources-like directory that's @@ -129,7 +134,7 @@ def __init__(self, gyp_path, path, build_file_dict): try: os.makedirs(self.path) self.created_dir = True - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise @@ -454,7 +459,7 @@ def Write(self): same = False try: same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise @@ -473,10 +478,10 @@ def Write(self): # # No way to get the umask without setting a new one? Set a safe one # and then set it back to the old value. - umask = os.umask(077) + umask = os.umask(0o77) os.umask(umask) - os.chmod(new_pbxproj_path, 0666 & ~umask) + os.chmod(new_pbxproj_path, 0o666 & ~umask) os.rename(new_pbxproj_path, pbxproj_path) except Exception: @@ -577,7 +582,7 @@ def PerformBuild(data, configurations, params): for config in configurations: arguments = ['xcodebuild', '-project', xcodeproj_path] arguments += ['-configuration', config] - print "Building [%s]: %s" % (config, arguments) + print("Building [%s]: %s" % (config, arguments)) subprocess.check_call(arguments) @@ -744,7 +749,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xctarget_type = gyp.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: + except KeyError as e: gyp.common.ExceptionAppend(e, "-- unknown product type while " "writing target %s" % target_name) raise diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index a046a15cc1d2d6..13d18d6b1b501f 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -26,6 +27,10 @@ from gyp.common import GypError from gyp.common import OrderedSet +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 # A list of types that are treated as linkable. 
linkable_types = [ @@ -242,10 +247,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, else: build_file_data = eval(build_file_contents, {'__builtins__': None}, None) - except SyntaxError, e: + except SyntaxError as e: e.filename = build_file_path raise - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) raise @@ -265,7 +270,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, else: LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, aux_data, None, check) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while reading includes of ' + build_file_path) raise @@ -467,7 +472,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, try: LoadTargetBuildFile(dependency, data, aux_data, variables, includes, depth, check, load_dependencies) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend( e, 'while loading dependencies of %s' % build_file_path) raise @@ -510,12 +515,12 @@ def CallLoadTargetBuildFile(global_flags, return (build_file_path, build_file_data, dependencies) - except GypError, e: + except GypError as e: sys.stderr.write("gyp: %s\n" % e) return None - except Exception, e: - print >>sys.stderr, 'Exception:', e - print >>sys.stderr, traceback.format_exc() + except Exception as e: + print('Exception:', e, file=sys.stderr) + print(traceback.format_exc(), file=sys.stderr) return None @@ -605,7 +610,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, args = (global_flags, dependency, variables, includes, depth, check, generator_input_info), callback = parallel_state.LoadTargetBuildFileCallback) - except KeyboardInterrupt, e: + except KeyboardInterrupt as e: parallel_state.pool.terminate() raise e @@ -905,7 +910,7 @@ def ExpandVariables(input, phase, variables, build_file): stderr=subprocess.PIPE, stdin=subprocess.PIPE, cwd=build_file_dir) - except Exception, e: + except Exception as e: raise GypError("%s while executing command '%s' in %s" % (e, contents, build_file)) @@ -1090,13 +1095,13 @@ def EvalSingleCondition( if eval(ast_code, {'__builtins__': None}, variables): return true_dict return false_dict - except SyntaxError, e: + except SyntaxError as e: syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' 'at character %d.' 
% (str(e.args[0]), e.text, build_file, e.offset), e.filename, e.lineno, e.offset, e.text) raise syntax_error - except NameError, e: + except NameError as e: gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % (cond_expr_expanded, build_file)) raise GypError(e) @@ -1857,7 +1862,7 @@ def VerifyNoGYPFileCircularDependencies(targets): for dependency in target_dependencies: try: dependency_build_file = gyp.common.BuildFile(dependency) - except GypError, e: + except GypError as e: gyp.common.ExceptionAppend( e, 'while computing dependencies of .gyp file %s' % build_file) raise @@ -2780,7 +2785,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, try: LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth, check, True) - except Exception, e: + except Exception as e: gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) raise diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py index 0ad7e7a1b66b8a..eca6cf4e672c57 100755 --- a/tools/gyp/pylib/gyp/mac_tool.py +++ b/tools/gyp/pylib/gyp/mac_tool.py @@ -7,6 +7,7 @@ These functions are executed via gyp-mac-tool when using the Makefile generator. """ +from __future__ import print_function import fcntl import fnmatch @@ -270,7 +271,7 @@ def ExecFilterLibtool(self, *cmd_list): _, err = libtoolout.communicate() for line in err.splitlines(): if not libtool_re.match(line) and not libtool_re5.match(line): - print >>sys.stderr, line + print(line, file=sys.stderr) # Unconditionally touch the output .a file on the command line if present # and the command succeeded. A bit hacky. if not libtoolout.returncode: @@ -480,8 +481,8 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -502,8 +503,8 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print >>sys.stderr, ( - 'cannot find mobile provisioning for %s' % bundle_identifier) + print(( + 'cannot find mobile provisioning for %s' % bundle_identifier), file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. the @@ -667,7 +668,7 @@ def WriteHmap(output_name, filelist): count = len(filelist) capacity = NextGreaterPowerOf2(count) strings_offset = 24 + (12 * capacity) - max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1]) + max_value_length = len(max(filelist.items(), key=lambda k__v: len(k__v[1]))[1]) out = open(output_name, "wb") out.write(struct.pack(' y) - (x < y) + +try: + unicode # Python 2 +except NameError: + unicode = str # Python 3 + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + + # See XCObject._EncodeString. This pattern is used to determine when a string # can be printed unquoted. 
Strings that match this pattern may be printed @@ -691,7 +708,7 @@ def _XCKVPrint(self, file, tabs, key, value): printable_value[0] == '"' and printable_value[-1] == '"': printable_value = printable_value[1:-1] printable += printable_key + ' = ' + printable_value + ';' + after_kv - except TypeError, e: + except TypeError as e: gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key) raise diff --git a/tools/gyp/tools/graphviz.py b/tools/gyp/tools/graphviz.py index 326ae221cf8e82..dcf7c765e5fefe 100755 --- a/tools/gyp/tools/graphviz.py +++ b/tools/gyp/tools/graphviz.py @@ -7,6 +7,7 @@ """Using the JSON dumped by the dump-dependency-json generator, generate input suitable for graphviz to render a dependency graph of targets.""" +from __future__ import print_function import collections import json @@ -50,9 +51,9 @@ def WriteGraph(edges): build_file, target_name, toolset = ParseTarget(src) files[build_file].append(src) - print 'digraph D {' - print ' fontsize=8' # Used by subgraphs. - print ' node [fontsize=8]' + print('digraph D {') + print(' fontsize=8') # Used by subgraphs. + print(' node [fontsize=8]') # Output nodes by file. We must first write out each node within # its file grouping before writing out any edges that may refer @@ -63,31 +64,31 @@ def WriteGraph(edges): # the display by making it a box without an internal node. target = targets[0] build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, - target_name) + print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, + target_name)) else: # Group multiple nodes together in a subgraph. - print ' subgraph "cluster_%s" {' % filename - print ' label = "%s"' % filename + print(' subgraph "cluster_%s" {' % filename) + print(' label = "%s"' % filename) for target in targets: build_file, target_name, toolset = ParseTarget(target) - print ' "%s" [label="%s"]' % (target, target_name) - print ' }' + print(' "%s" [label="%s"]' % (target, target_name)) + print(' }') # Now that we've placed all the nodes within subgraphs, output all # the edges between nodes. for src, dsts in edges.items(): for dst in dsts: - print ' "%s" -> "%s"' % (src, dst) + print(' "%s" -> "%s"' % (src, dst)) - print '}' + print('}') def main(): if len(sys.argv) < 2: - print >>sys.stderr, __doc__ - print >>sys.stderr - print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) + print(__doc__, file=sys.stderr) + print(file=sys.stderr) + print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr) return 1 edges = LoadEdges('dump.json', sys.argv[1:]) diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py index d5736bbd4a6009..d413c55a63022a 100755 --- a/tools/gyp/tools/pretty_gyp.py +++ b/tools/gyp/tools/pretty_gyp.py @@ -5,6 +5,7 @@ # found in the LICENSE file. 
"""Pretty-prints the contents of a GYP file.""" +from __future__ import print_function import sys import re @@ -125,15 +126,15 @@ def prettyprint_input(lines): (brace_diff, after) = count_braces(line) if brace_diff != 0: if after: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) indent += brace_diff else: indent += brace_diff - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print " " * (basic_offset * indent) + line + print(" " * (basic_offset * indent) + line) else: - print "" + print("") last_line = line diff --git a/tools/gyp/tools/pretty_sln.py b/tools/gyp/tools/pretty_sln.py index ca8cf4ad3fb836..64a9ec2bf6f2e1 100755 --- a/tools/gyp/tools/pretty_sln.py +++ b/tools/gyp/tools/pretty_sln.py @@ -11,6 +11,7 @@ Then it outputs a possible build order. """ +from __future__ import print_function __author__ = 'nsylvain (Nicolas Sylvain)' @@ -26,7 +27,7 @@ def BuildProject(project, built, projects, deps): for dep in deps[project]: if dep not in built: BuildProject(dep, built, projects, deps) - print project + print(project) built.append(project) def ParseSolution(solution_file): @@ -100,44 +101,44 @@ def ParseSolution(solution_file): return (projects, dependencies) def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Dependencies for all projects") + print("---------------------------------------") + print("-- --") for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] + print("Project : %s" % project) + print("Path : %s" % projects[project][0]) if dep_list: for dep in dep_list: - print " - %s" % dep - print "" + print(" - %s" % dep) + print("") - print "-- --" + print("-- --") def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" + print("---------------------------------------") + print("Build order ") + print("---------------------------------------") + print("-- --") built = [] for (project, _) in sorted(deps.items()): if project not in built: BuildProject(project, built, projects, deps) - print "-- --" + print("-- --") def PrintVCProj(projects): for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" + print("-------------------------------------") + print("-------------------------------------") + print(project) + print(project) + print(project) + print("-------------------------------------") + print("-------------------------------------") project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), projects[project][2])) @@ -153,7 +154,7 @@ def PrintVCProj(projects): def main(): # check if we have exactly 1 parameter. 
if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] + print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]) return 1 (projects, deps) = ParseSolution(sys.argv[1]) diff --git a/tools/gyp/tools/pretty_vcproj.py b/tools/gyp/tools/pretty_vcproj.py index 6099bd7cc4d8cc..ded0788edc6efc 100755 --- a/tools/gyp/tools/pretty_vcproj.py +++ b/tools/gyp/tools/pretty_vcproj.py @@ -11,6 +11,7 @@ It outputs the resulting xml to stdout. """ +from __future__ import print_function __author__ = 'nsylvain (Nicolas Sylvain)' @@ -20,6 +21,12 @@ from xml.dom.minidom import parse from xml.dom.minidom import Node +try: + cmp # Python 2 +except NameError: + def cmp(x, y): # Python 3 + return (x > y) - (x < y) + REPLACEMENTS = dict() ARGUMENTS = None @@ -61,7 +68,7 @@ def get_string(node): def PrettyPrintNode(node, indent=0): if node.nodeType == Node.TEXT_NODE: if node.data.strip(): - print '%s%s' % (' '*indent, node.data.strip()) + print('%s%s' % (' '*indent, node.data.strip())) return if node.childNodes: @@ -73,23 +80,23 @@ def PrettyPrintNode(node, indent=0): # Print the main tag if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) + print('%s<%s>' % (' '*indent, node.nodeName)) else: - print '%s<%s' % (' '*indent, node.nodeName) + print('%s<%s' % (' '*indent, node.nodeName)) all_attributes = [] for (name, value) in node.attributes.items(): all_attributes.append((name, value)) all_attributes.sort(CmpTuple()) for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) + print('%s %s="%s"' % (' '*indent, name, value)) + print('%s>' % (' '*indent)) if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) + print('%s %s' % (' '*indent, node.nodeValue)) for sub_node in node.childNodes: PrettyPrintNode(sub_node, indent=indent+2) - print '%s' % (' '*indent, node.nodeName) + print('%s' % (' '*indent, node.nodeName)) def FlattenFilter(node): diff --git a/tools/gyp_node.py b/tools/gyp_node.py index 043053c3daa9d9..831d2ef1550cdb 100755 --- a/tools/gyp_node.py +++ b/tools/gyp_node.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import os import sys @@ -52,7 +53,7 @@ def run_gyp(args): rc = gyp.main(args) if rc != 0: - print 'Error running GYP' + print('Error running GYP') sys.exit(rc) diff --git a/tools/icu/icutrim.py b/tools/icu/icutrim.py index 517bf39bad323d..485fd903d6149d 100755 --- a/tools/icu/icutrim.py +++ b/tools/icu/icutrim.py @@ -11,17 +11,26 @@ # Usage: # Use "-h" to get help options. +from __future__ import print_function import sys import shutil -# for utf-8 -reload(sys) -sys.setdefaultencoding("utf-8") - import optparse import os import json import re +try: + unicode # Python 2 +except NameError: + unicode = str # Python 3 + +# for utf-8 in Python 2 +try: + reload(sys) + sys.setdefaultencoding("utf-8") +except NameError: + pass + endian=sys.byteorder parser = optparse.OptionParser(usage="usage: mkdir tmp ; %prog -D ~/Downloads/icudt53l.dat -T tmp -F trim_en.json -O icudt53l.dat" ) @@ -79,54 +88,54 @@ for opt in [ "datfile", "filterfile", "tmpdir", "outfile" ]: if optVars[opt] is None: - print "Missing required option: %s" % opt + print("Missing required option: %s" % opt) sys.exit(1) if options.verbose>0: - print "Options: "+str(options) + print("Options: "+str(options)) if (os.path.isdir(options.tmpdir) and options.deltmpdir): if options.verbose>1: - print "Deleting tmp dir %s.." % (options.tmpdir) + print("Deleting tmp dir %s.." 
% (options.tmpdir)) shutil.rmtree(options.tmpdir) if not (os.path.isdir(options.tmpdir)): os.mkdir(options.tmpdir) else: - print "Please delete tmpdir %s before beginning." % options.tmpdir + print("Please delete tmpdir %s before beginning." % options.tmpdir) sys.exit(1) if options.endian not in ("big","little","host"): - print "Unknown endianness: %s" % options.endian + print("Unknown endianness: %s" % options.endian) sys.exit(1) if options.endian is "host": options.endian = endian if not os.path.isdir(options.tmpdir): - print "Error, tmpdir not a directory: %s" % (options.tmpdir) + print("Error, tmpdir not a directory: %s" % (options.tmpdir)) sys.exit(1) if not os.path.isfile(options.filterfile): - print "Filterfile doesn't exist: %s" % (options.filterfile) + print("Filterfile doesn't exist: %s" % (options.filterfile)) sys.exit(1) if not os.path.isfile(options.datfile): - print "Datfile doesn't exist: %s" % (options.datfile) + print("Datfile doesn't exist: %s" % (options.datfile)) sys.exit(1) if not options.datfile.endswith(".dat"): - print "Datfile doesn't end with .dat: %s" % (options.datfile) + print("Datfile doesn't end with .dat: %s" % (options.datfile)) sys.exit(1) outfile = os.path.join(options.tmpdir, options.outfile) if os.path.isfile(outfile): - print "Error, output file does exist: %s" % (outfile) + print("Error, output file does exist: %s" % (outfile)) sys.exit(1) if not options.outfile.endswith(".dat"): - print "Outfile doesn't end with .dat: %s" % (options.outfile) + print("Outfile doesn't end with .dat: %s" % (options.outfile)) sys.exit(1) dataname=options.outfile[0:-4] @@ -140,11 +149,11 @@ def runcmd(tool, cmd, doContinue=False): cmd = tool + " " + cmd if(options.verbose>4): - print "# " + cmd + print("# " + cmd) rc = os.system(cmd) if rc is not 0 and not doContinue: - print "FAILED: %s" % cmd + print("FAILED: %s" % cmd) sys.exit(1) return rc @@ -161,10 +170,10 @@ def runcmd(tool, cmd, doContinue=False): config["variables"]["locales"]["only"] = options.locales.split(',') if (options.verbose > 6): - print config + print(config) if(config.has_key("comment")): - print "%s: %s" % (options.filterfile, config["comment"]) + print("%s: %s" % (options.filterfile, config["comment"])) ## STEP 1 - copy the data file, swapping endianness ## The first letter of endian_letter will be 'b' or 'l' for big or little @@ -184,7 +193,7 @@ def runcmd(tool, cmd, doContinue=False): itemset = set(items) if (options.verbose>1): - print "input file: %d items" % (len(items)) + print("input file: %d items" % (len(items))) # list of all trees trees = {} @@ -211,23 +220,23 @@ def queueForRemoval(tree): return mytree = trees[tree] if(options.verbose>0): - print "* %s: %d items" % (tree, len(mytree["locs"])) + print("* %s: %d items" % (tree, len(mytree["locs"]))) # do varible substitution for this tree here if type(config["trees"][tree]) == str or type(config["trees"][tree]) == unicode: treeStr = config["trees"][tree] if(options.verbose>5): - print " Substituting $%s for tree %s" % (treeStr, tree) + print(" Substituting $%s for tree %s" % (treeStr, tree)) if(not config.has_key("variables") or not config["variables"].has_key(treeStr)): - print " ERROR: no variable: variables.%s for tree %s" % (treeStr, tree) + print(" ERROR: no variable: variables.%s for tree %s" % (treeStr, tree)) sys.exit(1) config["trees"][tree] = config["variables"][treeStr] myconfig = config["trees"][tree] if(options.verbose>4): - print " Config: %s" % (myconfig) + print(" Config: %s" % (myconfig)) # Process this tree 
if(len(myconfig)==0 or len(mytree["locs"])==0): if(options.verbose>2): - print " No processing for %s - skipping" % (tree) + print(" No processing for %s - skipping" % (tree)) else: only = None if myconfig.has_key("only"): @@ -236,22 +245,22 @@ def queueForRemoval(tree): thePool = "%spool.res" % (mytree["treeprefix"]) if (thePool in itemset): if(options.verbose>0): - print "Removing %s because tree %s is empty." % (thePool, tree) + print("Removing %s because tree %s is empty." % (thePool, tree)) remove.add(thePool) else: - print "tree %s - no ONLY" + print("tree %s - no ONLY") for l in range(len(mytree["locs"])): loc = mytree["locs"][l] if (only is not None) and not loc in only: # REMOVE loc toRemove = "%s%s%s" % (mytree["treeprefix"], loc, mytree["extension"]) if(options.verbose>6): - print "Queueing for removal: %s" % toRemove + print("Queueing for removal: %s" % toRemove) remove.add(toRemove) def addTreeByType(tree, mytree): if(options.verbose>1): - print "(considering %s): %s" % (tree, mytree) + print("(considering %s): %s" % (tree, mytree)) trees[tree] = mytree mytree["locs"]=[] for i in range(len(items)): @@ -278,7 +287,7 @@ def addTreeByType(tree, mytree): else: tree = treeprefix[0:-1] if(options.verbose>6): - print "procesing %s" % (tree) + print("procesing %s" % (tree)) trees[tree] = { "extension": ".res", "treeprefix": treeprefix, "hasIndex": True } # read in the resource list for the tree treelistfile = os.path.join(options.tmpdir,"%s.lst" % tree) @@ -288,7 +297,7 @@ def addTreeByType(tree, mytree): trees[tree]["locs"] = [treeitems[i].strip() for i in range(len(treeitems))] fi.close() if(not config.has_key("trees") or not config["trees"].has_key(tree)): - print " Warning: filter file %s does not mention trees.%s - will be kept as-is" % (options.filterfile, tree) + print(" Warning: filter file %s does not mention trees.%s - will be kept as-is" % (options.filterfile, tree)) else: queueForRemoval(tree) @@ -297,22 +306,22 @@ def removeList(count=0): global remove remove = remove - keep if(count > 10): - print "Giving up - %dth attempt at removal." % count + print("Giving up - %dth attempt at removal." % count) sys.exit(1) if(options.verbose>1): - print "%d items to remove - try #%d" % (len(remove),count) + print("%d items to remove - try #%d" % (len(remove),count)) if(len(remove)>0): oldcount = len(remove) hackerrfile=os.path.join(options.tmpdir, "REMOVE.err") removefile = os.path.join(options.tmpdir, "REMOVE.lst") fi = open(removefile, 'wb') for i in remove: - print >>fi, i + print(i, file=fi) fi.close() rc = runcmd("icupkg","-r %s %s 2> %s" % (removefile,outfile,hackerrfile),True) if rc is not 0: if(options.verbose>5): - print "## Damage control, trying to parse stderr from icupkg.." + print("## Damage control, trying to parse stderr from icupkg..") fi = open(hackerrfile, 'rb') erritems = fi.readlines() fi.close() @@ -324,15 +333,15 @@ def removeList(count=0): if m: toDelete = m.group(1) if(options.verbose > 5): - print "<< %s added to delete" % toDelete + print("<< %s added to delete" % toDelete) remove.add(toDelete) else: - print "ERROR: could not match errline: %s" % line + print("ERROR: could not match errline: %s" % line) sys.exit(1) if(options.verbose > 5): - print " now %d items to remove" % len(remove) + print(" now %d items to remove" % len(remove)) if(oldcount == len(remove)): - print " ERROR: could not add any mor eitems to remove. Fail." + print(" ERROR: could not add any mor eitems to remove. 
Fail.") sys.exit(1) removeList(count+1) diff --git a/tools/icu/shrink-icu-src.py b/tools/icu/shrink-icu-src.py index e18243542408a5..b6e456279b32c2 100644 --- a/tools/icu/shrink-icu-src.py +++ b/tools/icu/shrink-icu-src.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import print_function import optparse import os import re @@ -29,11 +30,11 @@ (options, args) = parser.parse_args() if os.path.isdir(options.icusmall): - print 'Deleting existing icusmall %s' % (options.icusmall) + print('Deleting existing icusmall %s' % (options.icusmall)) shutil.rmtree(options.icusmall) if not os.path.isdir(options.icusrc): - print 'Missing source ICU dir --icusrc=%s' % (options.icusrc) + print('Missing source ICU dir --icusrc=%s' % (options.icusrc)) sys.exit(1) @@ -73,7 +74,7 @@ def icu_ignore(dir, files): def icu_info(icu_full_path): uvernum_h = os.path.join(icu_full_path, 'source/common/unicode/uvernum.h') if not os.path.isfile(uvernum_h): - print ' Error: could not load %s - is ICU installed?' % uvernum_h + print(' Error: could not load %s - is ICU installed?' % uvernum_h) sys.exit(1) icu_ver_major = None matchVerExp = r'^\s*#define\s+U_ICU_VERSION_SHORT\s+"([^"]*)".*' @@ -83,25 +84,25 @@ def icu_info(icu_full_path): if m: icu_ver_major = m.group(1) if not icu_ver_major: - print ' Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h + print(' Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h) sys.exit(1) icu_endianness = sys.byteorder[0]; # TODO(srl295): EBCDIC should be 'e' return (icu_ver_major, icu_endianness) (icu_ver_major, icu_endianness) = icu_info(options.icusrc) -print "icudt%s%s" % (icu_ver_major, icu_endianness) +print("icudt%s%s" % (icu_ver_major, icu_endianness)) src_datafile = os.path.join(options.icutmp, "icusmdt%s.dat" % (icu_ver_major)) dst_datafile = os.path.join(options.icusmall, "source","data","in", "icudt%s%s.dat" % (icu_ver_major, icu_endianness)) if not os.path.isfile(src_datafile): - print "Could not find source datafile %s - did you build small-icu node?" % src_datafile + print("Could not find source datafile %s - did you build small-icu node?" 
% src_datafile) sys.exit(1) else: - print "will use small datafile %s" % (src_datafile) -print '%s --> %s' % (options.icusrc, options.icusmall) + print("will use small datafile %s" % (src_datafile)) +print('%s --> %s' % (options.icusrc, options.icusmall)) shutil.copytree(options.icusrc, options.icusmall, ignore=icu_ignore) -print '%s --> %s' % (src_datafile, dst_datafile) +print('%s --> %s' % (src_datafile, dst_datafile)) # now, make the data dir (since we ignored it) os.mkdir(os.path.join(os.path.join(options.icusmall, "source", "data"))) @@ -114,13 +115,13 @@ def icu_info(icu_full_path): readme_name = os.path.join(options.icusmall, "README-SMALL-ICU.txt" ) fi = open(readme_name, 'wb') -print >>fi, "Small ICU sources - auto generated by shrink-icu-src.py" -print >>fi, "" -print >>fi, "This directory contains the ICU subset used by --with-intl=small-icu (the default)" -print >>fi, "It is a strict subset of ICU %s source files with the following exception(s):" % (icu_ver_major) -print >>fi, "* %s : Reduced-size data file" % (dst_datafile) -print >>fi, "" -print >>fi, "" -print >>fi, "To rebuild this directory, see ../../tools/icu/README.md" -print >>fi, "" +print("Small ICU sources - auto generated by shrink-icu-src.py", file=fi) +print("", file=fi) +print("This directory contains the ICU subset used by --with-intl=small-icu (the default)", file=fi) +print("It is a strict subset of ICU %s source files with the following exception(s):" % (icu_ver_major), file=fi) +print("* %s : Reduced-size data file" % (dst_datafile), file=fi) +print("", file=fi) +print("", file=fi) +print("To rebuild this directory, see ../../tools/icu/README.md", file=fi) +print("", file=fi) fi.close() diff --git a/tools/inspector_protocol/CheckProtocolCompatibility.py b/tools/inspector_protocol/CheckProtocolCompatibility.py index c70162a2a44ef0..900611188e198b 100755 --- a/tools/inspector_protocol/CheckProtocolCompatibility.py +++ b/tools/inspector_protocol/CheckProtocolCompatibility.py @@ -45,6 +45,7 @@ # # Adding --show_changes to the command line prints out a list of valid public API changes. +from __future__ import print_function import copy import os.path import optparse @@ -467,9 +468,9 @@ def main(): if arg_options.show_changes: changes = compare_schemas(domains, baseline_domains, True) if len(changes) > 0: - print " Public changes since %s:" % version + print(" Public changes since %s:" % version) for change in changes: - print " %s" % change + print(" %s" % change) if arg_options.stamp: with open(arg_options.stamp, 'a') as _: diff --git a/tools/inspector_protocol/CodeGenerator.py b/tools/inspector_protocol/CodeGenerator.py index e630b02985710f..8e1e275ba487cb 100644 --- a/tools/inspector_protocol/CodeGenerator.py +++ b/tools/inspector_protocol/CodeGenerator.py @@ -14,6 +14,13 @@ except ImportError: import simplejson as json + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + + # Path handling for libraries and templates # Paths have to be normalized because Jinja uses the exact template path to # determine the hash used in the cache filename, and we need a pre-caching step diff --git a/tools/inspector_protocol/ConvertProtocolToJSON.py b/tools/inspector_protocol/ConvertProtocolToJSON.py index 56fc09d78cb18f..ed3cdfeef9ee47 100644 --- a/tools/inspector_protocol/ConvertProtocolToJSON.py +++ b/tools/inspector_protocol/ConvertProtocolToJSON.py @@ -1,3 +1,4 @@ +from __future__ import print_function # Copyright 2017 The Chromium Authors. All rights reserved. 
# Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -157,7 +158,7 @@ def parse(data): enumliterals.append(trimLine) continue - print 'Error in %s:%s, illegal token: \t%s' % (file_name, i, line) + print('Error in %s:%s, illegal token: \t%s' % (file_name, i, line)) sys.exit(1) return protocol diff --git a/tools/install.py b/tools/install.py index c97518d4220788..c7f75e3540d799 100755 --- a/tools/install.py +++ b/tools/install.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +from __future__ import print_function import ast import errno import os @@ -25,11 +26,11 @@ def load_config(): def try_unlink(path): try: os.unlink(path) - except OSError, e: + except OSError as e: if e.errno != errno.ENOENT: raise def try_symlink(source_path, link_path): - print 'symlinking %s -> %s' % (source_path, link_path) + print('symlinking %s -> %s' % (source_path, link_path)) try_unlink(link_path) try_mkdir_r(os.path.dirname(link_path)) os.symlink(source_path, link_path) @@ -37,7 +38,7 @@ def try_symlink(source_path, link_path): def try_mkdir_r(path): try: os.makedirs(path) - except OSError, e: + except OSError as e: if e.errno != errno.EEXIST: raise def try_rmdir_r(path): @@ -45,7 +46,7 @@ def try_rmdir_r(path): while path.startswith(install_path): try: os.rmdir(path) - except OSError, e: + except OSError as e: if e.errno == errno.ENOTEMPTY: return if e.errno == errno.ENOENT: return raise @@ -60,14 +61,14 @@ def mkpaths(path, dst): def try_copy(path, dst): source_path, target_path = mkpaths(path, dst) - print 'installing %s' % target_path + print('installing %s' % target_path) try_mkdir_r(os.path.dirname(target_path)) try_unlink(target_path) # prevent ETXTBSY errors return shutil.copy2(source_path, target_path) def try_remove(path, dst): source_path, target_path = mkpaths(path, dst) - print 'removing %s' % target_path + print('removing %s' % target_path) try_unlink(target_path) try_rmdir_r(os.path.dirname(target_path)) diff --git a/tools/jinja2/_compat.py b/tools/jinja2/_compat.py index 61d85301a4a9ef..7c46f1f472fcda 100644 --- a/tools/jinja2/_compat.py +++ b/tools/jinja2/_compat.py @@ -10,6 +10,8 @@ :copyright: Copyright 2013 by the Jinja team, see AUTHORS. :license: BSD, see LICENSE for details. """ +# flake8: noqa + import sys PY2 = sys.version_info[0] == 2 diff --git a/tools/jinja2/bccache.py b/tools/jinja2/bccache.py index 080e527cabf33b..95b7dae107c0a3 100644 --- a/tools/jinja2/bccache.py +++ b/tools/jinja2/bccache.py @@ -14,6 +14,8 @@ :copyright: (c) 2017 by the Jinja Team. :license: BSD. """ +# flake8: noqa + from os import path, listdir import os import sys diff --git a/tools/js2c.py b/tools/js2c.py index 40f2bc6f48f483..47590dd473bbf0 100755 --- a/tools/js2c.py +++ b/tools/js2c.py @@ -37,6 +37,11 @@ import string import hashlib +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + def ToCArray(elements, step=10): slices = (elements[i:i+step] for i in xrange(0, len(elements), step)) diff --git a/tools/markupsafe/_compat.py b/tools/markupsafe/_compat.py index 29e4a3dac13f28..4337f12e8b9226 100644 --- a/tools/markupsafe/_compat.py +++ b/tools/markupsafe/_compat.py @@ -8,6 +8,8 @@ :copyright: (c) 2013 by Armin Ronacher. :license: BSD, see LICENSE for more details. 
""" +# flake8: noqa + import sys PY2 = sys.version_info[0] == 2 diff --git a/tools/run-valgrind.py b/tools/run-valgrind.py index cad3e7ec6954d2..67fa424584e63a 100755 --- a/tools/run-valgrind.py +++ b/tools/run-valgrind.py @@ -27,6 +27,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +from __future__ import print_function from os import path import subprocess import sys @@ -40,12 +41,12 @@ ] if len(sys.argv) < 2: - print 'Please provide an executable to analyze.' + print('Please provide an executable to analyze.') sys.exit(1) executable = path.join(NODE_ROOT, sys.argv[1]) if not path.exists(executable): - print 'Cannot find the file specified: %s' % executable + print('Cannot find the file specified: %s' % executable) sys.exit(1) # Compute the command line. diff --git a/tools/specialize_node_d.py b/tools/specialize_node_d.py index bb5ef5a57ce916..bfed17f741d2a6 100755 --- a/tools/specialize_node_d.py +++ b/tools/specialize_node_d.py @@ -6,15 +6,16 @@ # Specialize node.d for given flavor (`freebsd`) and arch (`x64` or `ia32`) # +from __future__ import print_function import re import sys if len(sys.argv) != 5: - print "usage: specialize_node_d.py outfile src/node.d flavor arch" + print("usage: specialize_node_d.py outfile src/node.d flavor arch") sys.exit(2); -outfile = file(sys.argv[1], 'w'); -infile = file(sys.argv[2], 'r'); +outfile = open(sys.argv[1], 'w'); +infile = open(sys.argv[2], 'r'); flavor = sys.argv[3]; arch = sys.argv[4]; diff --git a/tools/test.py b/tools/test.py index c5c9fb53c07626..13a271217ee4e2 100755 --- a/tools/test.py +++ b/tools/test.py @@ -28,6 +28,7 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +from __future__ import print_function import imp import logging import optparse @@ -49,6 +50,22 @@ from datetime import datetime from Queue import Queue, Empty +try: + cmp # Python 2 +except NameError: + def cmp(x, y): # Python 3 + return (x > y) - (x < y) + +try: + reduce # Python 2 +except NameError: # Python 3 + from functools import reduce + +try: + xrange # Python 2 +except NameError: + xrange = range # Python 3 + logger = logging.getLogger('testrunner') skip_regex = re.compile(r'# SKIP\S*\s+(.*)', re.IGNORECASE) @@ -87,11 +104,11 @@ def PrintFailureHeader(self, test): negative_marker = '[negative] ' else: negative_marker = '' - print "=== %(label)s %(negative)s===" % { + print("=== %(label)s %(negative)s===" % { 'label': test.GetLabel(), 'negative': negative_marker - } - print "Path: %s" % "/".join(test.path) + }) + print("Path: %s" % "/".join(test.path)) def Run(self, tasks): self.Starting() @@ -109,9 +126,9 @@ def Run(self, tasks): for thread in threads: # Use a timeout so that signals (ctrl-c) will be processed. thread.join(timeout=10000000) - except (KeyboardInterrupt, SystemExit), e: + except (KeyboardInterrupt, SystemExit) as e: self.shutdown_event.set() - except Exception, e: + except Exception as e: # If there's an exception we schedule an interruption for any # remaining threads. 
self.shutdown_event.set() @@ -148,7 +165,7 @@ def RunSingle(self, parallel, thread_id): output = case.Run() output.diagnostic.append('ECONNREFUSED received, test retried') case.duration = (datetime.now() - start) - except IOError, e: + except IOError as e: return if self.shutdown_event.is_set(): return @@ -182,40 +199,40 @@ def EscapeCommand(command): class SimpleProgressIndicator(ProgressIndicator): def Starting(self): - print 'Running %i tests' % len(self.cases) + print('Running %i tests' % len(self.cases)) def Done(self): - print + print() for failed in self.failed: self.PrintFailureHeader(failed.test) if failed.output.stderr: - print "--- stderr ---" - print failed.output.stderr.strip() + print("--- stderr ---") + print(failed.output.stderr.strip()) if failed.output.stdout: - print "--- stdout ---" - print failed.output.stdout.strip() - print "Command: %s" % EscapeCommand(failed.command) + print("--- stdout ---") + print(failed.output.stdout.strip()) + print("Command: %s" % EscapeCommand(failed.command)) if failed.HasCrashed(): - print "--- %s ---" % PrintCrashed(failed.output.exit_code) + print("--- %s ---" % PrintCrashed(failed.output.exit_code)) if failed.HasTimedOut(): - print "--- TIMEOUT ---" + print("--- TIMEOUT ---") if len(self.failed) == 0: - print "===" - print "=== All tests succeeded" - print "===" + print("===") + print("=== All tests succeeded") + print("===") else: - print - print "===" - print "=== %i tests failed" % len(self.failed) + print() + print("===") + print("=== %i tests failed" % len(self.failed)) if self.crashed > 0: - print "=== %i tests CRASHED" % self.crashed - print "===" + print("=== %i tests CRASHED" % self.crashed) + print("===") class VerboseProgressIndicator(SimpleProgressIndicator): def AboutToRun(self, case): - print 'Starting %s...' % case.GetLabel() + print('Starting %s...' 
% case.GetLabel()) sys.stdout.flush() def HasRun(self, output): @@ -226,7 +243,7 @@ def HasRun(self, output): outcome = 'FAIL' else: outcome = 'pass' - print 'Done running %s: %s' % (output.test.GetLabel(), outcome) + print('Done running %s: %s' % (output.test.GetLabel(), outcome)) class DotsProgressIndicator(SimpleProgressIndicator): @@ -362,9 +379,9 @@ def HasRun(self, output): ("because:" in line or "reason:" in line): if not printed_file: printed_file = True - print '==== %s ====' % command + print('==== %s ====' % command) self.failed.append(output) - print ' %s' % line + print(' %s' % line) def Done(self): pass @@ -393,15 +410,15 @@ def HasRun(self, output): self.PrintFailureHeader(output.test) stdout = output.output.stdout.strip() if len(stdout): - print self.templates['stdout'] % stdout + print(self.templates['stdout'] % stdout) stderr = output.output.stderr.strip() if len(stderr): - print self.templates['stderr'] % stderr - print "Command: %s" % EscapeCommand(output.command) + print(self.templates['stderr'] % stderr) + print("Command: %s" % EscapeCommand(output.command)) if output.HasCrashed(): - print "--- %s ---" % PrintCrashed(output.output.exit_code) + print("--- %s ---" % PrintCrashed(output.output.exit_code)) if output.HasTimedOut(): - print "--- TIMEOUT ---" + print("--- TIMEOUT ---") def Truncate(self, str, length): if length and (len(str) > (length - 3)): @@ -422,7 +439,7 @@ def PrintProgress(self, name): } status = self.Truncate(status, 78) self.last_status_length = len(status) - print status, + print(status, end=' ') sys.stdout.flush() @@ -437,7 +454,7 @@ def __init__(self, cases, flaky_tests_mode): super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates) def ClearLine(self, last_line_length): - print "\033[1K\r", + print("\033[1K\r", end=' ') class MonochromeProgressIndicator(CompactProgressIndicator): @@ -453,7 +470,7 @@ def __init__(self, cases, flaky_tests_mode): super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates) def ClearLine(self, last_line_length): - print ("\r" + (" " * last_line_length) + "\r"), + print(("\r" + (" " * last_line_length) + "\r"), end=' ') PROGRESS_INDICATORS = { @@ -616,7 +633,7 @@ def KillTimedOutProcess(context, pid): def RunProcess(context, timeout, args, **rest): - if context.verbose: print "#", " ".join(args) + if context.verbose: print("#", " ".join(args)) popen_args = args prev_error_mode = SEM_INVALID_VALUE; if utils.IsWindows(): @@ -695,7 +712,7 @@ def CheckedUnlink(name): while True: try: os.unlink(name) - except OSError, e: + except OSError as e: # On Windows unlink() fails if another process (typically a virus scanner # or the indexing service) has the file open. Those processes keep a # file open for a short time only, so yield and try again; it'll succeed. 
@@ -764,8 +781,8 @@ def disableCoreFiles(): else: os.close(fd_out) os.close(fd_err) - output = file(outname).read() - errors = file(errname).read() + output = open(outname).read() + errors = open(errname).read() CheckedUnlink(outname) CheckedUnlink(errname) @@ -1210,15 +1227,15 @@ def ParseCondition(expr): """Parses a logical expression into an Expression object""" tokens = Tokenizer(expr).Tokenize() if not tokens: - print "Malformed expression: '%s'" % expr + print("Malformed expression: '%s'" % expr) return None scan = Scanner(tokens) ast = ParseLogicalExpression(scan) if not ast: - print "Malformed expression: '%s'" % expr + print("Malformed expression: '%s'" % expr) return None if scan.HasMore(): - print "Malformed expression: '%s'" % expr + print("Malformed expression: '%s'" % expr) return None return ast @@ -1431,19 +1448,19 @@ def ProcessOptions(options): if options.run == [""]: options.run = None elif len(options.run) != 2: - print "The run argument must be two comma-separated integers." + print("The run argument must be two comma-separated integers.") return False else: try: options.run = map(int, options.run) except ValueError: - print "Could not parse the integers from the run argument." + print("Could not parse the integers from the run argument.") return False if options.run[0] < 0 or options.run[1] < 0: - print "The run argument cannot have negative integers." + print("The run argument cannot have negative integers.") return False if options.run[0] >= options.run[1]: - print "The test group to run (n) must be smaller than number of groups (m)." + print("The test group to run (n) must be smaller than number of groups (m).") return False if options.J: # inherit JOBS from environment if provided. some virtualised systems @@ -1451,7 +1468,7 @@ def ProcessOptions(options): cores = os.environ.get('JOBS') options.j = int(cores) if cores is not None else multiprocessing.cpu_count() if options.flaky_tests not in ["run", "skip", "dontcare"]: - print "Unknown flaky-tests mode %s" % options.flaky_tests + print("Unknown flaky-tests mode %s" % options.flaky_tests) return False return True @@ -1468,13 +1485,13 @@ def PrintReport(cases): def IsFailOk(o): return (len(o) == 2) and (FAIL in o) and (OKAY in o) unskipped = [c for c in cases if not SKIP in c.outcomes] - print REPORT_TEMPLATE % { + print(REPORT_TEMPLATE % { 'total': len(cases), 'skipped': len(cases) - len(unskipped), 'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]), 'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]), 'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]]) - } + }) class Pattern(object): @@ -1654,13 +1671,13 @@ def Main(): for mode in options.mode: vm = context.GetVm(arch, mode) if not exists(vm): - print "Can't find shell executable: '%s'" % vm + print("Can't find shell executable: '%s'" % vm) continue archEngineContext = Execute([vm, "-p", "process.arch"], context) vmArch = archEngineContext.stdout.rstrip() if archEngineContext.exit_code is not 0 or vmArch == "undefined": - print "Can't determine the arch of: '%s'" % vm - print archEngineContext.stderr.rstrip() + print("Can't determine the arch of: '%s'" % vm) + print(archEngineContext.stderr.rstrip()) continue env = { 'mode': mode, @@ -1693,15 +1710,15 @@ def Main(): if key in visited: continue visited.add(key) - print "--- begin source: %s ---" % test.GetLabel() + print("--- begin source: %s ---" % test.GetLabel()) source = test.GetSource().strip() - print source - print "--- end source: %s ---" % test.GetLabel() + 
print(source) + print("--- end source: %s ---" % test.GetLabel()) return 0 if options.warn_unused: for rule in globally_unused_rules: - print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path]) + print("Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path])) tempdir = os.environ.get('NODE_TEST_DIR') or options.temp_dir if tempdir: @@ -1710,7 +1727,7 @@ def Main(): os.makedirs(tempdir) except OSError as exception: if exception.errno != errno.EEXIST: - print "Could not create the temporary directory", options.temp_dir + print("Could not create the temporary directory", options.temp_dir) sys.exit(1) if options.report: @@ -1737,7 +1754,7 @@ def DoSkip(case): len(cases_to_run), options.run[1]) ] if len(cases_to_run) == 0: - print "No tests to run." + print("No tests to run.") return 1 else: try: @@ -1748,13 +1765,13 @@ def DoSkip(case): result = 1 duration = time.time() - start except KeyboardInterrupt: - print "Interrupted" + print("Interrupted") return 1 if options.time: # Write the times to stderr to make it easy to separate from the # test output. - print + print() sys.stderr.write("--- Total time: %s ---\n" % FormatTime(duration)) timed_tests = [ t.case for t in cases_to_run if not t.case.duration is None ] timed_tests.sort(lambda a, b: a.CompareTime(b)) From c26261d9b072dc176c8de8cb1ac2154221d8f238 Mon Sep 17 00:00:00 2001 From: cclauss Date: Tue, 23 Oct 2018 18:54:53 +0200 Subject: [PATCH 2/4] No noqa --- tools/jinja2/_compat.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tools/jinja2/_compat.py b/tools/jinja2/_compat.py index 7c46f1f472fcda..61d85301a4a9ef 100644 --- a/tools/jinja2/_compat.py +++ b/tools/jinja2/_compat.py @@ -10,8 +10,6 @@ :copyright: Copyright 2013 by the Jinja team, see AUTHORS. :license: BSD, see LICENSE for details. """ -# flake8: noqa - import sys PY2 = sys.version_info[0] == 2 From 78c664d183c1f7adc86a111107ad5a4755aebbe3 Mon Sep 17 00:00:00 2001 From: cclauss Date: Tue, 23 Oct 2018 18:55:57 +0200 Subject: [PATCH 3/4] No noqa --- tools/jinja2/bccache.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tools/jinja2/bccache.py b/tools/jinja2/bccache.py index 95b7dae107c0a3..080e527cabf33b 100644 --- a/tools/jinja2/bccache.py +++ b/tools/jinja2/bccache.py @@ -14,8 +14,6 @@ :copyright: (c) 2017 by the Jinja Team. :license: BSD. """ -# flake8: noqa - from os import path, listdir import os import sys From 63273036450d7025b39a6fa0de134ec282db41d7 Mon Sep 17 00:00:00 2001 From: cclauss Date: Tue, 23 Oct 2018 18:56:42 +0200 Subject: [PATCH 4/4] No noqa --- tools/markupsafe/_compat.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tools/markupsafe/_compat.py b/tools/markupsafe/_compat.py index 4337f12e8b9226..29e4a3dac13f28 100644 --- a/tools/markupsafe/_compat.py +++ b/tools/markupsafe/_compat.py @@ -8,8 +8,6 @@ :copyright: (c) 2013 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ -# flake8: noqa - import sys PY2 = sys.version_info[0] == 2
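
The hunks above repeat one pattern throughout the tree: add "from __future__ import print_function" and convert print statements to print() calls, then restore the builtins that Python 3 renamed or removed (xrange, cmp, reduce, unicode) with a try/except NameError fallback so the same file runs under both interpreters. The following is a minimal standalone sketch of that idiom, not part of the patch series; the file name and the toy main() are illustrative only.

    #!/usr/bin/env python
    # py23_shim_example.py -- illustrative sketch, not one of the patched files.
    # Shows the compatibility idiom used by the hunks above: print_function
    # plus per-name NameError fallbacks for builtins dropped in Python 3.
    from __future__ import print_function

    try:
        xrange          # Python 2
    except NameError:   # Python 3
        xrange = range

    try:
        cmp             # Python 2
    except NameError:   # Python 3
        def cmp(x, y):
            return (x > y) - (x < y)

    try:
        reduce          # Python 2
    except NameError:   # Python 3
        from functools import reduce


    def main():
        # Behaves identically under Python 2 and Python 3.
        total = reduce(lambda a, b: a + b, xrange(5))
        print("sum of 0..4 = %d" % total)
        print("cmp(1, 2) = %d, cmp(2, 2) = %d" % (cmp(1, 2), cmp(2, 2)))


    if __name__ == "__main__":
        main()

Testing for a NameError rather than branching on sys.version_info keeps each shim local to the single name it restores and leaves Python 2 behaviour untouched, which is why the same three- or four-line blocks can be pasted near the top of each affected file.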