diff --git a/.restyled.yaml b/.restyled.yaml index 7418df7a01b919..ccb5fcf477fc7b 100644 --- a/.restyled.yaml +++ b/.restyled.yaml @@ -65,6 +65,7 @@ exclude: - "third_party/bluez/repo/**/*" - "third_party/cirque/repo/**/*" - "third_party/nanopb/repo/**/*" + - "src/controller/python/chip/clusters/CHIPClusters.py" # generated file changed_paths: @@ -198,3 +199,13 @@ restylers: include: - "**/*.sh" - "**/*.bash" + - name: autopep8 + image: 'restyled/restyler-autopep8:v1.5.7' + command: + - autopep8 + - '--in-place' + arguments: [] + include: + - '**/*.py' + interpreters: + - python diff --git a/build/chip/java/jar_runner.py b/build/chip/java/jar_runner.py index 59cb00e594decc..bd8e1762d36c2b 100755 --- a/build/chip/java/jar_runner.py +++ b/build/chip/java/jar_runner.py @@ -28,69 +28,69 @@ def IsExecutable(path): - """Returns whether file at |path| exists and is executable. + """Returns whether file at |path| exists and is executable. - Args: - path: absolute or relative path to test. + Args: + path: absolute or relative path to test. - Returns: - True if the file at |path| exists, False otherwise. - """ - return os.path.isfile(path) and os.access(path, os.X_OK) + Returns: + True if the file at |path| exists and is executable, False otherwise. + """ + return os.path.isfile(path) and os.access(path, os.X_OK) def FindCommand(command): - """Looks up for |command| in PATH. - - Args: - command: name of the command to lookup, if command is a relative or - absolute path (i.e. contains some path separator) then only that - path will be tested. - - Returns: - Full path to command or None if the command was not found. - - On Windows, this respects the PATHEXT environment variable when the - command name does not have an extension. - """ - fpath, _ = os.path.split(command) - if fpath: - if IsExecutable(command): - return command - - if sys.platform == 'win32': - # On Windows, if the command does not have an extension, cmd.exe will - # try all extensions from PATHEXT when resolving the full path. - command, ext = os.path.splitext(command) - if not ext: - exts = os.environ['PATHEXT'].split(os.path.pathsep) + """Looks up |command| in PATH. + + Args: + command: name of the command to look up, if command is a relative or + absolute path (i.e. contains some path separator) then only that + path will be tested. + + Returns: + Full path to command or None if the command was not found. + + On Windows, this respects the PATHEXT environment variable when the + command name does not have an extension. + """ + fpath, _ = os.path.split(command) + if fpath: + if IsExecutable(command): + return command + + if sys.platform == 'win32': + # On Windows, if the command does not have an extension, cmd.exe will + # try all extensions from PATHEXT when resolving the full path.
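# A minimal stdlib-based sketch of the lookup FindCommand implements above
# (illustration only, not part of this patch): since Python 3.3, shutil.which
# performs the same PATH walk and honors PATHEXT on Windows.
import shutil

jar_path = shutil.which('jar')  # absolute path to 'jar', or None if not on PATH
if jar_path is None:
    raise SystemExit('jar: command not found')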
+ command, ext = os.path.splitext(command) + if not ext: + exts = os.environ['PATHEXT'].split(os.path.pathsep) + else: + exts = [ext] else: - exts = [ext] - else: - exts = [''] + exts = [''] - for path in os.environ['PATH'].split(os.path.pathsep): - for ext in exts: - path = os.path.join(path, command) + ext - if IsExecutable(path): - return path + for path in os.environ['PATH'].split(os.path.pathsep): + for ext in exts: + path = os.path.join(path, command) + ext + if IsExecutable(path): + return path - return None + return None def main(): - java_path = FindCommand('jar') - if not java_path: - sys.stderr.write('jar: command not found\n') - sys.exit(EXIT_FAILURE) + java_path = FindCommand('jar') + if not java_path: + sys.stderr.write('jar: command not found\n') + sys.exit(EXIT_FAILURE) - args = sys.argv[1:] - if len(args) < 1: - sys.stderr.write('usage: %s [jar_args]...\n' % sys.argv[0]) - sys.exit(EXIT_FAILURE) + args = sys.argv[1:] + if len(args) < 1: + sys.stderr.write('usage: %s [jar_args]...\n' % sys.argv[0]) + sys.exit(EXIT_FAILURE) - return subprocess.check_call([java_path] + args) + return subprocess.check_call([java_path] + args) if __name__ == '__main__': - sys.exit(main()) \ No newline at end of file + sys.exit(main()) diff --git a/build/chip/java/javac_runner.py b/build/chip/java/javac_runner.py index 1e141b52b90202..c6234cc2263a8c 100755 --- a/build/chip/java/javac_runner.py +++ b/build/chip/java/javac_runner.py @@ -28,93 +28,93 @@ def IsExecutable(path): - """Returns whether file at |path| exists and is executable. + """Returns whether file at |path| exists and is executable. - Args: - path: absolute or relative path to test. + Args: + path: absolute or relative path to test. - Returns: - True if the file at |path| exists, False otherwise. - """ - return os.path.isfile(path) and os.access(path, os.X_OK) + Returns: + True if the file at |path| exists and is executable, False otherwise. + """ + return os.path.isfile(path) and os.access(path, os.X_OK) def FindCommand(command): - """Looks up for |command| in PATH. - - Args: - command: name of the command to lookup, if command is a relative or absolute - path (i.e. contains some path separator) then only that path will be - tested. - - Returns: - Full path to command or None if the command was not found. - - On Windows, this respects the PATHEXT environment variable when the - command name does not have an extension. - """ - fpath, _ = os.path.split(command) - if fpath: - if IsExecutable(command): - return command - - if sys.platform == 'win32': - # On Windows, if the command does not have an extension, cmd.exe will - # try all extensions from PATHEXT when resolving the full path. - command, ext = os.path.splitext(command) - if not ext: - exts = os.environ['PATHEXT'].split(os.path.pathsep) + """Looks up |command| in PATH. + + Args: + command: name of the command to look up, if command is a relative or absolute + path (i.e. contains some path separator) then only that path will be + tested. + + Returns: + Full path to command or None if the command was not found. + + On Windows, this respects the PATHEXT environment variable when the + command name does not have an extension. + """ + fpath, _ = os.path.split(command) + if fpath: + if IsExecutable(command): + return command + + if sys.platform == 'win32': + # On Windows, if the command does not have an extension, cmd.exe will + # try all extensions from PATHEXT when resolving the full path.
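# A sketch of the extension handling above, assuming a typical Windows PATHEXT
# value (illustration only, not part of this patch):
import os

pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
exts = pathext.split(';')  # e.g. ['.COM', '.EXE', '.BAT', '.CMD']
candidates = ['javac' + ext for ext in exts]
# -> ['javac.COM', 'javac.EXE', 'javac.BAT', 'javac.CMD']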
+ command, ext = os.path.splitext(command) + if not ext: + exts = os.environ['PATHEXT'].split(os.path.pathsep) + else: + exts = [ext] else: - exts = [ext] - else: - exts = [''] + exts = [''] - for path in os.environ['PATH'].split(os.path.pathsep): - for ext in exts: - path = os.path.join(path, command) + ext - if IsExecutable(path): - return path + for path in os.environ['PATH'].split(os.path.pathsep): + for ext in exts: + path = os.path.join(path, command) + ext + if IsExecutable(path): + return path - return None + return None def main(): - java_path = FindCommand('javac') - if not java_path: - sys.stderr.write('javac: command not found\n') - sys.exit(EXIT_FAILURE) - - parser = argparse.ArgumentParser('Javac runner') - parser.add_argument( - '--classdir', - dest='classdir', - required=True, - help='Directory that will contain class files') - parser.add_argument( - '--outfile', - dest='outfile', - required=True, - help='Output file containing a list of classes') - parser.add_argument( - 'rest', metavar='JAVAC_ARGS', nargs='*', help='Argumets to pass to javac') - - args = parser.parse_args() - if not os.path.isdir(args.classdir): - os.makedirs(args.classdir) - retcode = subprocess.check_call([java_path] + args.rest) - if retcode != EXIT_SUCCESS: - return retcode - - with open(args.outfile, 'wt') as f: - prefixlen = len(args.classdir) + 1 - for root, dirnames, filenames in os.walk(args.classdir): - for filename in filenames: - if filename.endswith('.class'): - f.write(os.path.join(root[prefixlen:], filename)) - f.write('\n') - - return EXIT_SUCCESS + java_path = FindCommand('javac') + if not java_path: + sys.stderr.write('javac: command not found\n') + sys.exit(EXIT_FAILURE) + + parser = argparse.ArgumentParser('Javac runner') + parser.add_argument( + '--classdir', + dest='classdir', + required=True, + help='Directory that will contain class files') + parser.add_argument( + '--outfile', + dest='outfile', + required=True, + help='Output file containing a list of classes') + parser.add_argument( + 'rest', metavar='JAVAC_ARGS', nargs='*', help='Arguments to pass to javac') + + args = parser.parse_args() + if not os.path.isdir(args.classdir): + os.makedirs(args.classdir) + retcode = subprocess.check_call([java_path] + args.rest) + if retcode != EXIT_SUCCESS: + return retcode + + with open(args.outfile, 'wt') as f: + prefixlen = len(args.classdir) + 1 + for root, dirnames, filenames in os.walk(args.classdir): + for filename in filenames: + if filename.endswith('.class'): + f.write(os.path.join(root[prefixlen:], filename)) + f.write('\n') + + return EXIT_SUCCESS if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/build/chip/linux/gen_gdbus_wrapper.py b/build/chip/linux/gen_gdbus_wrapper.py index 67bf46ba735c5e..263d5b72c37aff 100755 --- a/build/chip/linux/gen_gdbus_wrapper.py +++ b/build/chip/linux/gen_gdbus_wrapper.py @@ -28,8 +28,7 @@ def main(argv): parser.add_argument( "--output_c", - help= - "The source file to generate containing the GDBus proxy implementation" + help="The source file to generate containing the GDBus proxy implementation" ) parser.add_argument( @@ -65,7 +64,8 @@ def main(argv): gdbus_args = ["gdbus-codegen", "--body", "--output", options.output_c ] + extra_args + [options.input_file] subprocess.check_call(gdbus_args) - sed_args = ["sed", "-i", "s/config\.h/BuildConfig.h/g", options.output_c] + sed_args = ["sed", "-i", + "s/config\.h/BuildConfig.h/g", options.output_c] subprocess.check_call(sed_args) if options.output_h: diff --git
a/build/chip/write_buildconfig_header.py b/build/chip/write_buildconfig_header.py index 9352e960d98c4c..b50d71aaa81895 100755 --- a/build/chip/write_buildconfig_header.py +++ b/build/chip/write_buildconfig_header.py @@ -57,74 +57,74 @@ class Options: - def __init__(self, output, rulename, header_guard, defines): - self.output = output - self.rulename = rulename - self.header_guard = header_guard - self.defines = defines + def __init__(self, output, rulename, header_guard, defines): + self.output = output + self.rulename = rulename + self.header_guard = header_guard + self.defines = defines def GetOptions(): - parser = optparse.OptionParser() - parser.add_option('--output', help="Output header name inside --gen-dir.") - parser.add_option('--rulename', - help="Helpful name of build rule for including in the " + - "comment at the top of the file.") - parser.add_option('--gen-dir', - help="Path to root of generated file directory tree.") - parser.add_option('--definitions', - help="Name of the response file containing the defines.") - cmdline_options, cmdline_flags = parser.parse_args() - - # Compute header guard by replacing some chars with _ and upper-casing. - header_guard = cmdline_options.output.upper() - header_guard = \ - header_guard.replace('/', '_').replace('\\', '_').replace('.', '_') - header_guard += '_' - - # The actual output file is inside the gen dir. - output = os.path.join(cmdline_options.gen_dir, cmdline_options.output) - - # Definition file in GYP is newline separated, in GN they are shell formatted. - # shlex can parse both of these. - with open(cmdline_options.definitions, 'r') as def_file: - defs = shlex.split(def_file.read()) - defines_index = defs.index('--defines') - - # Everything after --defines are defines. true/false are remapped to 1/0, - # everything else is passed through. - defines = [] - for define in defs[defines_index + 1 :]: - equals_index = define.index('=') - key = define[:equals_index] - value = define[equals_index + 1:] - - # Canonicalize and validate the value. - if value == 'true': - value = '1' - elif value == 'false': - value = '0' - defines.append((key, str(value))) - - return Options(output=output, - rulename=cmdline_options.rulename, - header_guard=header_guard, - defines=defines) + parser = optparse.OptionParser() + parser.add_option('--output', help="Output header name inside --gen-dir.") + parser.add_option('--rulename', + help="Helpful name of build rule for including in the " + + "comment at the top of the file.") + parser.add_option('--gen-dir', + help="Path to root of generated file directory tree.") + parser.add_option('--definitions', + help="Name of the response file containing the defines.") + cmdline_options, cmdline_flags = parser.parse_args() + + # Compute header guard by replacing some chars with _ and upper-casing. + header_guard = cmdline_options.output.upper() + header_guard = \ + header_guard.replace('/', '_').replace('\\', '_').replace('.', '_') + header_guard += '_' + + # The actual output file is inside the gen dir. + output = os.path.join(cmdline_options.gen_dir, cmdline_options.output) + + # Definition file in GYP is newline separated, in GN they are shell formatted. + # shlex can parse both of these. + with open(cmdline_options.definitions, 'r') as def_file: + defs = shlex.split(def_file.read()) + defines_index = defs.index('--defines') + + # Everything after --defines are defines. true/false are remapped to 1/0, + # everything else is passed through. 
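# A sketch of the true/false remapping described above, with hypothetical
# define names (not taken from this repository):
defs = ['--defines', 'CHIP_ENABLE_BLE=true', 'CHIP_LOG_LEVEL=4']
defines = []
for define in defs[defs.index('--defines') + 1:]:
    key, _, value = define.partition('=')
    defines.append((key, {'true': '1', 'false': '0'}.get(value, value)))
# defines == [('CHIP_ENABLE_BLE', '1'), ('CHIP_LOG_LEVEL', '4')]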
+ defines = [] + for define in defs[defines_index + 1:]: + equals_index = define.index('=') + key = define[:equals_index] + value = define[equals_index + 1:] + + # Canonicalize and validate the value. + if value == 'true': + value = '1' + elif value == 'false': + value = '0' + defines.append((key, str(value))) + + return Options(output=output, + rulename=cmdline_options.rulename, + header_guard=header_guard, + defines=defines) def WriteHeader(options): - with open(options.output, 'w') as output_file: - output_file.write("// Generated by write_buildconfig_header.py\n") - if options.rulename: - output_file.write('// From "' + options.rulename + '"\n') + with open(options.output, 'w') as output_file: + output_file.write("// Generated by write_buildconfig_header.py\n") + if options.rulename: + output_file.write('// From "' + options.rulename + '"\n') - output_file.write('\n#ifndef %s\n' % options.header_guard) - output_file.write('#define %s\n\n' % options.header_guard) + output_file.write('\n#ifndef %s\n' % options.header_guard) + output_file.write('#define %s\n\n' % options.header_guard) - for pair in options.defines: - output_file.write('#define %s %s\n' % pair) + for pair in options.defines: + output_file.write('#define %s %s\n' % pair) - output_file.write('\n#endif // %s\n' % options.header_guard) + output_file.write('\n#endif // %s\n' % options.header_guard) options = GetOptions() diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py index 1adbb520c83718..2458ebe41dccb7 100755 --- a/build/config/linux/pkg-config.py +++ b/build/config/linux/pkg-config.py @@ -69,198 +69,197 @@ def SetConfigPath(options): - """Set the PKG_CONFIG_LIBDIR environment variable. + """Set the PKG_CONFIG_LIBDIR environment variable. - This takes into account any sysroot and architecture specification from the - options on the given command line. - """ + This takes into account any sysroot and architecture specification from the + options on the given command line. + """ - sysroot = options.sysroot - assert sysroot + sysroot = options.sysroot + assert sysroot - # Compute the library path name based on the architecture. - arch = options.arch - if sysroot and not arch: - print("You must specify an architecture via -a if using a sysroot.") - sys.exit(1) + # Compute the library path name based on the architecture. + arch = options.arch + if sysroot and not arch: + print("You must specify an architecture via -a if using a sysroot.") + sys.exit(1) - libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig' - libdir += ':' + sysroot + '/usr/share/pkgconfig' - os.environ['PKG_CONFIG_LIBDIR'] = libdir - return libdir + libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig' + libdir += ':' + sysroot + '/usr/share/pkgconfig' + os.environ['PKG_CONFIG_LIBDIR'] = libdir + return libdir def GetPkgConfigPrefixToStrip(options, args): - """Returns the prefix from pkg-config where packages are installed. - - This returned prefix is the one that should be stripped from the beginning of - directory names to take into account sysroots. - """ - # Some sysroots, like the Chromium OS ones, may generate paths that are not - # relative to the sysroot. For example, - # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all - # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) - # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr). 
- # To support this correctly, it's necessary to extract the prefix to strip - # from pkg-config's |prefix| variable. - prefix = subprocess.check_output([options.pkg_config, - "--variable=prefix"] + args, env=os.environ).decode('utf-8') - if prefix[-4] == '/usr': - return prefix[4:] - return prefix + """Returns the prefix from pkg-config where packages are installed. + + This returned prefix is the one that should be stripped from the beginning of + directory names to take into account sysroots. + """ + # Some sysroots, like the Chromium OS ones, may generate paths that are not + # relative to the sysroot. For example, + # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all + # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) + # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr). + # To support this correctly, it's necessary to extract the prefix to strip + # from pkg-config's |prefix| variable. + prefix = subprocess.check_output([options.pkg_config, + "--variable=prefix"] + args, env=os.environ).decode('utf-8') + if prefix[-4] == '/usr': + return prefix[4:] + return prefix def MatchesAnyRegexp(flag, list_of_regexps): - """Returns true if the first argument matches any regular expression in the - given list.""" - for regexp in list_of_regexps: - if regexp.search(flag) != None: - return True - return False + """Returns true if the first argument matches any regular expression in the + given list.""" + for regexp in list_of_regexps: + if regexp.search(flag) != None: + return True + return False def RewritePath(path, strip_prefix, sysroot): - """Rewrites a path by stripping the prefix and prepending the sysroot.""" - if os.path.isabs(path) and not path.startswith(sysroot): - if path.startswith(strip_prefix): - path = path[len(strip_prefix):] - path = path.lstrip('/') - return os.path.join(sysroot, path) - else: - return path + """Rewrites a path by stripping the prefix and prepending the sysroot.""" + if os.path.isabs(path) and not path.startswith(sysroot): + if path.startswith(strip_prefix): + path = path[len(strip_prefix):] + path = path.lstrip('/') + return os.path.join(sysroot, path) + else: + return path def main(): - parser = OptionParser() - parser.add_option('-d', '--debug', action='store_true') - parser.add_option('-p', action='store', dest='pkg_config', type='string', - default='pkg-config') - parser.add_option('-v', action='append', dest='strip_out', type='string') - parser.add_option('-s', action='store', dest='sysroot', type='string') - parser.add_option('-a', action='store', dest='arch', type='string') - parser.add_option('--system_libdir', action='store', dest='system_libdir', - type='string', default='lib') - parser.add_option('--atleast-version', action='store', - dest='atleast_version', type='string') - parser.add_option('--libdir', action='store_true', dest='libdir') - parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir') - parser.add_option('--version-as-components', action='store_true', - dest='version_as_components') - (options, args) = parser.parse_args() - - # Make a list of regular expressions to strip out. 
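# The check above, 'if prefix[-4] == '/usr':', appears to be a latent bug
# carried over unchanged from the original: prefix[-4] is a single character
# and can never equal '/usr', and prefix[4:] trims the wrong end of the
# string. A sketch of what the surrounding comment seems to intend:
prefix = '/build/x86-generic/usr'  # hypothetical pkg-config prefix, newline stripped
if prefix.endswith('/usr'):
    strip_prefix = prefix[:-len('/usr')]  # -> '/build/x86-generic'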
- strip_out = [] - if options.strip_out != None: - for regexp in options.strip_out: - strip_out.append(re.compile(regexp)) - - if options.sysroot: - libdir = SetConfigPath(options) - if options.debug: - sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir) - prefix = GetPkgConfigPrefixToStrip(options, args) - else: - prefix = '' - - if options.atleast_version: - # When asking for the return value, just run pkg-config and print the return - # value, no need to do other work. - if not subprocess.call([options.pkg_config, - "--atleast-version=" + options.atleast_version] + - args): - print("true") + parser = OptionParser() + parser.add_option('-d', '--debug', action='store_true') + parser.add_option('-p', action='store', dest='pkg_config', type='string', + default='pkg-config') + parser.add_option('-v', action='append', dest='strip_out', type='string') + parser.add_option('-s', action='store', dest='sysroot', type='string') + parser.add_option('-a', action='store', dest='arch', type='string') + parser.add_option('--system_libdir', action='store', dest='system_libdir', + type='string', default='lib') + parser.add_option('--atleast-version', action='store', + dest='atleast_version', type='string') + parser.add_option('--libdir', action='store_true', dest='libdir') + parser.add_option('--dridriverdir', action='store_true', + dest='dridriverdir') + parser.add_option('--version-as-components', action='store_true', + dest='version_as_components') + (options, args) = parser.parse_args() + + # Make a list of regular expressions to strip out. + strip_out = [] + if options.strip_out != None: + for regexp in options.strip_out: + strip_out.append(re.compile(regexp)) + + if options.sysroot: + libdir = SetConfigPath(options) + if options.debug: + sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir) + prefix = GetPkgConfigPrefixToStrip(options, args) else: - print("false") - return 0 - - if options.version_as_components: - cmd = [options.pkg_config, "--modversion"] + args - try: - version_string = subprocess.check_output(cmd).decode('utf-8') - except Exception: - sys.stderr.write('Error from pkg-config.\n') - return 1 - print(json.dumps(list(map(int, version_string.strip().split("."))))) - return 0 - - - if options.libdir: - cmd = [options.pkg_config, "--variable=libdir"] + args + prefix = '' + + if options.atleast_version: + # When asking for the return value, just run pkg-config and print the return + # value, no need to do other work. 
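# pkg-config exits 0 when the version constraint is satisfied, so the
# 'not subprocess.call(...)' test prints "true" exactly when the requirement
# holds. Standalone sketch (assumes pkg-config and glib-2.0 are installed):
import subprocess

ok = not subprocess.call(['pkg-config', '--atleast-version=2.0', 'glib-2.0'])
print('true' if ok else 'false')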
+ if not subprocess.call([options.pkg_config, + "--atleast-version=" + options.atleast_version] + + args): + print("true") + else: + print("false") + return 0 + + if options.version_as_components: + cmd = [options.pkg_config, "--modversion"] + args + try: + version_string = subprocess.check_output(cmd).decode('utf-8') + except Exception: + sys.stderr.write('Error from pkg-config.\n') + return 1 + print(json.dumps(list(map(int, version_string.strip().split("."))))) + return 0 + + if options.libdir: + cmd = [options.pkg_config, "--variable=libdir"] + args + if options.debug: + sys.stderr.write('Running: %s\n' % cmd) + try: + libdir = subprocess.check_output(cmd).decode('utf-8') + except Exception: + print("Error from pkg-config.") + return 1 + sys.stdout.write(libdir.strip()) + return 0 + + if options.dridriverdir: + cmd = [options.pkg_config, "--variable=dridriverdir"] + args + if options.debug: + sys.stderr.write('Running: %s\n' % cmd) + try: + dridriverdir = subprocess.check_output(cmd).decode('utf-8') + except Exception: + print("Error from pkg-config.") + return 1 + sys.stdout.write(dridriverdir.strip()) + return + + cmd = [options.pkg_config, "--cflags", "--libs"] + args if options.debug: - sys.stderr.write('Running: %s\n' % cmd) - try: - libdir = subprocess.check_output(cmd).decode('utf-8') - except Exception: - print("Error from pkg-config.") - return 1 - sys.stdout.write(libdir.strip()) - return 0 + sys.stderr.write('Running: %s\n' % ' '.join(cmd)) - if options.dridriverdir: - cmd = [options.pkg_config, "--variable=dridriverdir"] + args - if options.debug: - sys.stderr.write('Running: %s\n' % cmd) try: - dridriverdir = subprocess.check_output(cmd).decode('utf-8') + flag_string = subprocess.check_output(cmd).decode('utf-8') except Exception: - print("Error from pkg-config.") - return 1 - sys.stdout.write(dridriverdir.strip()) - return - - cmd = [options.pkg_config, "--cflags", "--libs"] + args - if options.debug: - sys.stderr.write('Running: %s\n' % ' '.join(cmd)) - - try: - flag_string = subprocess.check_output(cmd).decode('utf-8') - except Exception: - sys.stderr.write('Could not run pkg-config.\n') - return 1 - - # For now just split on spaces to get the args out. This will break if - # pkgconfig returns quoted things with spaces in them, but that doesn't seem - # to happen in practice. - all_flags = flag_string.strip().split(' ') - - - sysroot = options.sysroot - if not sysroot: - sysroot = '' - - includes = [] - cflags = [] - libs = [] - lib_dirs = [] - - for flag in all_flags[:]: - if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out): - continue; - - if flag[:2] == '-l': - libs.append(RewritePath(flag[2:], prefix, sysroot)) - elif flag[:2] == '-L': - lib_dirs.append(RewritePath(flag[2:], prefix, sysroot)) - elif flag[:2] == '-I': - includes.append(RewritePath(flag[2:], prefix, sysroot)) - elif flag[:3] == '-Wl': - # Don't allow libraries to control ld flags. These should be specified - # only in build files. - pass - elif flag == '-pthread': - # Many libs specify "-pthread" which we don't need since we always include - # this anyway. Removing it here prevents a bunch of duplicate inclusions - # on the command line. - pass - else: - cflags.append(flag) - - # Output a GN array, the first one is the cflags, the second are the libs. The - # JSON formatter prints GN compatible lists when everything is a list of - # strings. 
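# The script's final output is a single JSON array of four string lists, which
# GN can parse directly as a list literal; a sketch with hypothetical values:
import json

includes = ['/usr/include/glib-2.0']
cflags = ['-DNDEBUG']
libs = ['glib-2.0']
lib_dirs = []
print(json.dumps([includes, cflags, libs, lib_dirs]))
# -> [["/usr/include/glib-2.0"], ["-DNDEBUG"], ["glib-2.0"], []]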
- print(json.dumps([includes, cflags, libs, lib_dirs])) - return 0 + sys.stderr.write('Could not run pkg-config.\n') + return 1 + + # For now just split on spaces to get the args out. This will break if + # pkgconfig returns quoted things with spaces in them, but that doesn't seem + # to happen in practice. + all_flags = flag_string.strip().split(' ') + + sysroot = options.sysroot + if not sysroot: + sysroot = '' + + includes = [] + cflags = [] + libs = [] + lib_dirs = [] + + for flag in all_flags[:]: + if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out): + continue + + if flag[:2] == '-l': + libs.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:2] == '-L': + lib_dirs.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:2] == '-I': + includes.append(RewritePath(flag[2:], prefix, sysroot)) + elif flag[:3] == '-Wl': + # Don't allow libraries to control ld flags. These should be specified + # only in build files. + pass + elif flag == '-pthread': + # Many libs specify "-pthread" which we don't need since we always include + # this anyway. Removing it here prevents a bunch of duplicate inclusions + # on the command line. + pass + else: + cflags.append(flag) + + # Output a GN array, the first one is the cflags, the second are the libs. The + # JSON formatter prints GN compatible lists when everything is a list of + # strings. + print(json.dumps([includes, cflags, libs, lib_dirs])) + return 0 if __name__ == '__main__': - sys.exit(main()) + sys.exit(main()) diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py index b18a7c9c086e11..10ace2b8a62dfa 100644 --- a/build/gn_run_binary.py +++ b/build/gn_run_binary.py @@ -56,11 +56,12 @@ ret = subprocess.call(args) if ret != 0: - if ret <= -100: - # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to - # recognize and differentiate in hex. In order to print them as unsigned - # hex we need to add 4 Gig to them. - print('%s failed with exit code 0x%08X' % (sys.argv[1], ret + (1 << 32))) - else: - print('%s failed with exit code %d' % (sys.argv[1], ret)) + if ret <= -100: + # Windows error codes such as 0xC0000005 and 0xC0000409 are much easier to + # recognize and differentiate in hex. In order to print them as unsigned + # hex we need to add 4 Gig to them. 
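# Adding 1 << 32 reinterprets a negative 32-bit exit status as unsigned hex;
# for example, STATUS_ACCESS_VIOLATION round-trips like this:
ret = -1073741819                    # 0xC0000005 seen as a signed 32-bit int
print('0x%08X' % (ret + (1 << 32)))  # -> 0xC0000005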
+ print('%s failed with exit code 0x%08X' % + (sys.argv[1], ret + (1 << 32))) + else: + print('%s failed with exit code %d' % (sys.argv[1], ret)) sys.exit(ret) diff --git a/config/esp32/components/chip/create_args_gn.py b/config/esp32/components/chip/create_args_gn.py index 6bf530f479e9ca..cab3ecb23cdf27 100644 --- a/config/esp32/components/chip/create_args_gn.py +++ b/config/esp32/components/chip/create_args_gn.py @@ -58,10 +58,12 @@ def get_compile_flags(src_file): replace = "-I%s" % args.idf_path replace_with = "-isystem%s" % args.idf_path - compile_flags = list(map(lambda f: ('"%s"' % f).replace(replace, replace_with), compile_flags)) + compile_flags = list(map(lambda f: ('"%s"' % f).replace( + replace, replace_with), compile_flags)) if args.filter_out: - filter_out = list(map(lambda f: ('"%s"' % f), args.filter_out.split(';'))) + filter_out = list(map(lambda f: ('"%s"' % f), + args.filter_out.split(';'))) compile_flags = [c for c in compile_flags if c not in filter_out] return compile_flags diff --git a/config/nrfconnect/chip-module/make_gn_args.py b/config/nrfconnect/chip-module/make_gn_args.py index eb9d4ad8a2fed9..f30cd459a6e7d4 100755 --- a/config/nrfconnect/chip-module/make_gn_args.py +++ b/config/nrfconnect/chip-module/make_gn_args.py @@ -34,9 +34,11 @@ '-W*', ] + def escape_strings(gn_args): return [[key, re.sub(GN_SPECIAL_CHARACTERS, r'\\\1', value)] for key, value in gn_args] + def write_gn_args(args): if args.module: sys.stdout.write('import("{}")\n'.format(args.module)) @@ -47,10 +49,13 @@ def write_gn_args(args): for key, value in args.arg_string: sys.stdout.write('{} = "{}"\n'.format(key, value)) - cflag_excludes = ', '.join(['"{}"'.format(exclude) for exclude in GN_CFLAG_EXCLUDES]) + cflag_excludes = ', '.join(['"{}"'.format(exclude) + for exclude in GN_CFLAG_EXCLUDES]) for key, value in args.arg_cflags: - sys.stdout.write('{} = filter_exclude(string_split("{}"), [{}])\n'.format(key, value, cflag_excludes)) + sys.stdout.write('{} = filter_exclude(string_split("{}"), [{}])\n'.format( + key, value, cflag_excludes)) + def main(): parser = argparse.ArgumentParser(fromfile_prefix_chars='@') @@ -63,5 +68,6 @@ def main(): args.arg_cflags = escape_strings(args.arg_cflags) write_gn_args(args) + if __name__ == "__main__": main() diff --git a/examples/pigweed-app/mobly_tests/echo_test.py b/examples/pigweed-app/mobly_tests/echo_test.py index 9ef179622d0ea0..179987f60f5dd9 100644 --- a/examples/pigweed-app/mobly_tests/echo_test.py +++ b/examples/pigweed-app/mobly_tests/echo_test.py @@ -26,8 +26,9 @@ def setup_class(self): object is created from this.''' self.ads = self.register_controller(pigweed_device) self.dut = self.ads[0] - self.dut.platform.flash() # Flashes the image passed in the configuration yml. - time.sleep(1) # give the device time to boot and register rpcs + # Flashes the image passed in the configuration yml. 
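# The rewriting above quotes each compile flag and demotes IDF include paths
# to system includes; a sketch with a hypothetical IDF path:
idf_path = '/opt/esp/idf'
flags = ['-O2', '-I%s/components/lwip' % idf_path]
quoted = [('"%s"' % f).replace('-I' + idf_path, '-isystem' + idf_path)
          for f in flags]
# -> ['"-O2"', '"-isystem/opt/esp/idf/components/lwip"']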
+ self.dut.platform.flash() + time.sleep(1) # give the device time to boot and register rpcs def test_hello(self): ''' Tests EchoService.Echo ''' diff --git a/integrations/mobly/build/lib/chip_mobly/pigweed_device.py b/integrations/mobly/build/lib/chip_mobly/pigweed_device.py index a5aca0df258499..0edd75a6a5105e 100644 --- a/integrations/mobly/build/lib/chip_mobly/pigweed_device.py +++ b/integrations/mobly/build/lib/chip_mobly/pigweed_device.py @@ -14,7 +14,7 @@ import os from pathlib import Path -import serial # type: ignore +import serial # type: ignore import importlib from pw_hdlc.rpc import HdlcRpcClient @@ -31,7 +31,8 @@ class Error(Exception): class PigweedDevice: def __init__(self, device_tty, baud, platform_module=None, platform_args=None): - self.pw_rpc_client = HdlcRpcClient(serial.Serial(device_tty, baud), [PROTO]) + self.pw_rpc_client = HdlcRpcClient( + serial.Serial(device_tty, baud), [PROTO]) self._platform = None print("Platform args: %s" % platform_args) print("Platform module: %s" % platform_module) @@ -86,4 +87,5 @@ def _validate_config(config): required_keys = ["device_tty", "baud"] # A placeholder. for key in required_keys: if key not in config: - raise Error("Required key %s missing from config %s" % (key, config)) + raise Error("Required key %s missing from config %s" % + (key, config)) diff --git a/integrations/mobly/chip_mobly/pigweed_device.py b/integrations/mobly/chip_mobly/pigweed_device.py index d736ccc345248b..debfc6d519b00b 100644 --- a/integrations/mobly/chip_mobly/pigweed_device.py +++ b/integrations/mobly/chip_mobly/pigweed_device.py @@ -14,7 +14,7 @@ import os from pathlib import Path -import serial # type: ignore +import serial # type: ignore import importlib from pw_hdlc.rpc import HdlcRpcClient, default_channels @@ -88,4 +88,5 @@ def _validate_config(config): required_keys = ["device_tty", "baud"] # A placeholder. for key in required_keys: if key not in config: - raise Error("Required key %s missing from config %s" % (key, config)) + raise Error("Required key %s missing from config %s" % + (key, config)) diff --git a/integrations/mobly/hello_world_test.py b/integrations/mobly/hello_world_test.py index d4b2eb3513e1c9..6fdfcbca1fc931 100755 --- a/integrations/mobly/hello_world_test.py +++ b/integrations/mobly/hello_world_test.py @@ -13,7 +13,7 @@ # limitations under the License. from chip_mobly import pigweed_device -from mobly import asserts # type: ignore +from mobly import asserts # type: ignore from mobly import base_test from mobly import test_runner diff --git a/integrations/mobly/setup.py b/integrations/mobly/setup.py index 64165a3aa285f9..2e681af44953dc 100644 --- a/integrations/mobly/setup.py +++ b/integrations/mobly/setup.py @@ -14,7 +14,7 @@ """chip_mobly""" -import setuptools # type: ignore +import setuptools # type: ignore setuptools.setup( name='chip_mobly', diff --git a/scripts/build/build/__init__.py b/scripts/build/build/__init__.py index 0abe8474c5e9fe..87a13ce40011d6 100644 --- a/scripts/build/build/__init__.py +++ b/scripts/build/build/__init__.py @@ -10,7 +10,7 @@ def CommaSeparate(items) -> str: - return ', '.join([x.ArgName for x in items]) + return ', '.join([x.ArgName for x in items]) # Supported platforms/boards/apps for generation/compilation @@ -22,138 +22,140 @@ def CommaSeparate(items) -> str: class BuildSteps(Enum): - GENERATED = auto() + GENERATED = auto() class Context: - """Represents a grouped list of platform/board/app builders to use - - to generate make/ninja instructions and to compile. 
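# SetupBuilders (below) fills in whichever of platform/board/application the
# caller omits; hypothetically, a request for just Application.LIGHT expands as:
#
#   platforms = TargetRelations.PlatformsForApplication(Application.LIGHT)
#   boards = set().union(
#       *[TargetRelations.BoardsForPlatform(p) for p in platforms])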
- """ - - def __init__(self, runner, repository_path:str, output_prefix:str): - self.builders = [] - self.builder_factory = BuilderFactory(runner, repository_path, - output_prefix) - self.completed_steps = set() - - def SetupBuilders(self, platforms: Sequence[Platform], - boards: Sequence[Board], - applications: Sequence[Application], - enable_flashbundle: bool): - """Configures internal builders for the given platform/board/app combination. - - Handles smart default selection, so that users only need to specify - part of platform/board/application information and the method tries - to automatically deduce the rest of the arguments. - """ - if not platforms and not boards: - if applications: - platforms = set().union(*[ - TargetRelations.PlatformsForApplication(app) for app in applications - ]) - else: - # when nothing is specified, start with a default host build - platforms = [Platform.HOST] - - # at this point, at least one of 'platforms' or 'boards' is non-empty - if not boards: - boards = set().union(*[ - TargetRelations.BoardsForPlatform(platform) for platform in platforms - ]) - elif not platforms: - platforms = set().union( - *[TargetRelations.PlatformsForBoard(board) for board in boards]) - - if not applications: - applications = set().union(*[ - TargetRelations.ApplicationsForPlatform(platform) - for platform in platforms - ]) - - platforms = set(platforms) - boards = set(boards) - applications = set(applications) - - logging.info('Platforms being built: %s', CommaSeparate(platforms)) - logging.info('Boards being built: %s', CommaSeparate(boards)) - logging.info('Applications being built: %s', CommaSeparate(applications)) - - # Sanity check: ensure all input arguments generate at least an output - platforms_with_builders = set() - boards_with_builders = set() - applications_with_builders = set() - - for platform in sorted(platforms): - for board in sorted(boards): - for application in sorted(applications): - builder = self.builder_factory.Create(platform, board, application, enable_flashbundle=enable_flashbundle) - if not builder: - logging.debug('Builder not supported for tuple %s/%s/%s', platform, - board, application) - continue - - self.builders.append(builder) - platforms_with_builders.add(platform) - boards_with_builders.add(board) - applications_with_builders.add(application) - - if platforms != platforms_with_builders: - logging.warn('Platforms without build output: %s', - CommaSeparate(platforms.difference(platforms_with_builders))) - - if boards != boards_with_builders: - logging.warn('Boards without build output: %s', - CommaSeparate(boards.difference(boards_with_builders))) - - if applications != applications_with_builders: - logging.warn( - 'Applications without build output: %s', - CommaSeparate(applications.difference(applications_with_builders))) - - # whenever builders change, assume generation is required again - self.completed_steps.discard(BuildSteps.GENERATED) - - def Generate(self): - """Performs a build generation IFF code generation has not yet been performed.""" - if BuildSteps.GENERATED in self.completed_steps: - return - - for builder in self.builders: - logging.info('Generating %s', builder.output_dir) - builder.generate() - - self.completed_steps.add(BuildSteps.GENERATED) - - def Build(self): - self.Generate() - - for builder in self.builders: - logging.info('Building %s', builder.output_dir) - builder.build() - - def CleanOutputDirectories(self): - for builder in self.builders: - logging.warn('Cleaning %s', builder.output_dir) - 
shutil.rmtree(builder.output_dir) - - # any generated output was cleaned - self.completed_steps.discard(BuildSteps.GENERATED) - - def CreateArtifactArchives(self, directory: str): - logging.info('Copying build artifacts to %s', directory) - if not os.path.exists(directory): - os.makedirs(directory) - for builder in self.builders: - # FIXME: builder subdir... - builder.CompressArtifacts(os.path.join( - directory, f'{builder.identifier}.tar.gz')) - - def CopyArtifactsTo(self, path: str): - logging.info('Copying build artifacts to %s', path) - if not os.path.exists(path): - os.makedirs(path) - - for builder in self.builders: - # FIXME: builder subdir... - builder.CopyArtifacts(os.path.join(path, builder.identifier)) + """Represents a grouped list of platform/board/app builders to use + + to generate make/ninja instructions and to compile. + """ + + def __init__(self, runner, repository_path: str, output_prefix: str): + self.builders = [] + self.builder_factory = BuilderFactory(runner, repository_path, + output_prefix) + self.completed_steps = set() + + def SetupBuilders(self, platforms: Sequence[Platform], + boards: Sequence[Board], + applications: Sequence[Application], + enable_flashbundle: bool): + """Configures internal builders for the given platform/board/app combination. + + Handles smart default selection, so that users only need to specify + part of platform/board/application information and the method tries + to automatically deduce the rest of the arguments. + """ + if not platforms and not boards: + if applications: + platforms = set().union(*[ + TargetRelations.PlatformsForApplication(app) for app in applications + ]) + else: + # when nothing is specified, start with a default host build + platforms = [Platform.HOST] + + # at this point, at least one of 'platforms' or 'boards' is non-empty + if not boards: + boards = set().union(*[ + TargetRelations.BoardsForPlatform(platform) for platform in platforms + ]) + elif not platforms: + platforms = set().union( + *[TargetRelations.PlatformsForBoard(board) for board in boards]) + + if not applications: + applications = set().union(*[ + TargetRelations.ApplicationsForPlatform(platform) + for platform in platforms + ]) + + platforms = set(platforms) + boards = set(boards) + applications = set(applications) + + logging.info('Platforms being built: %s', CommaSeparate(platforms)) + logging.info('Boards being built: %s', CommaSeparate(boards)) + logging.info('Applications being built: %s', + CommaSeparate(applications)) + + # Sanity check: ensure all input arguments generate at least an output + platforms_with_builders = set() + boards_with_builders = set() + applications_with_builders = set() + + for platform in sorted(platforms): + for board in sorted(boards): + for application in sorted(applications): + builder = self.builder_factory.Create( + platform, board, application, enable_flashbundle=enable_flashbundle) + if not builder: + logging.debug('Builder not supported for tuple %s/%s/%s', platform, + board, application) + continue + + self.builders.append(builder) + platforms_with_builders.add(platform) + boards_with_builders.add(board) + applications_with_builders.add(application) + + if platforms != platforms_with_builders: + logging.warn('Platforms without build output: %s', + CommaSeparate(platforms.difference(platforms_with_builders))) + + if boards != boards_with_builders: + logging.warn('Boards without build output: %s', + CommaSeparate(boards.difference(boards_with_builders))) + + if applications != applications_with_builders: + 
logging.warn( + 'Applications without build output: %s', + CommaSeparate(applications.difference(applications_with_builders))) + + # whenever builders change, assume generation is required again + self.completed_steps.discard(BuildSteps.GENERATED) + + def Generate(self): + """Performs a build generation IFF code generation has not yet been performed.""" + if BuildSteps.GENERATED in self.completed_steps: + return + + for builder in self.builders: + logging.info('Generating %s', builder.output_dir) + builder.generate() + + self.completed_steps.add(BuildSteps.GENERATED) + + def Build(self): + self.Generate() + + for builder in self.builders: + logging.info('Building %s', builder.output_dir) + builder.build() + + def CleanOutputDirectories(self): + for builder in self.builders: + logging.warn('Cleaning %s', builder.output_dir) + shutil.rmtree(builder.output_dir) + + # any generated output was cleaned + self.completed_steps.discard(BuildSteps.GENERATED) + + def CreateArtifactArchives(self, directory: str): + logging.info('Copying build artifacts to %s', directory) + if not os.path.exists(directory): + os.makedirs(directory) + for builder in self.builders: + # FIXME: builder subdir... + builder.CompressArtifacts(os.path.join( + directory, f'{builder.identifier}.tar.gz')) + + def CopyArtifactsTo(self, path: str): + logging.info('Copying build artifacts to %s', path) + if not os.path.exists(path): + os.makedirs(path) + + for builder in self.builders: + # FIXME: builder subdir... + builder.CopyArtifacts(os.path.join(path, builder.identifier)) diff --git a/scripts/build/build/factory.py b/scripts/build/build/factory.py index 432262a43401f1..2e57870640ae4e 100644 --- a/scripts/build/build/factory.py +++ b/scripts/build/build/factory.py @@ -28,53 +28,53 @@ class MatchApplication: - def __init__(self, app, board=None): - self.app = app - self.board = board + def __init__(self, app, board=None): + self.app = app + self.board = board - def Match(self, board: Board, app: Application): - if app != self.app: - return False - return self.board is None or board == self.board + def Match(self, board: Board, app: Application): + if app != self.app: + return False + return self.board is None or board == self.board class Matcher(): - """Figures out if a proper builder can be created for a platform/board/app combination.""" + """Figures out if a proper builder can be created for a platform/board/app combination.""" - def __init__(self, builder_class): - self.builder_class = builder_class - self.app_arguments = {} - self.board_arguments = {} + def __init__(self, builder_class): + self.builder_class = builder_class + self.app_arguments = {} + self.board_arguments = {} - def AcceptApplication(self, __app_key: Application, **kargs): - self.app_arguments[MatchApplication(__app_key)] = kargs + def AcceptApplication(self, __app_key: Application, **kargs): + self.app_arguments[MatchApplication(__app_key)] = kargs - def AcceptApplicationForBoard(self, __app_key: Application, __board: Board, - **kargs): - self.app_arguments[MatchApplication(__app_key, __board)] = kargs + def AcceptApplicationForBoard(self, __app_key: Application, __board: Board, + **kargs): + self.app_arguments[MatchApplication(__app_key, __board)] = kargs - def AcceptBoard(self, __board_key: Board, **kargs): - self.board_arguments[__board_key] = kargs + def AcceptBoard(self, __board_key: Board, **kargs): + self.board_arguments[__board_key] = kargs - def Create(self, runner, __board_key: Board, __app_key: Application, - repo_path: str, **kargs): - 
"""Creates a new builder for the given board/app. """ - if not __board_key in self.board_arguments: - return None + def Create(self, runner, __board_key: Board, __app_key: Application, + repo_path: str, **kargs): + """Creates a new builder for the given board/app. """ + if not __board_key in self.board_arguments: + return None - extra_app_args = None - for key, value in self.app_arguments.items(): - if key.Match(__board_key, __app_key): - extra_app_args = value - break + extra_app_args = None + for key, value in self.app_arguments.items(): + if key.Match(__board_key, __app_key): + extra_app_args = value + break - if extra_app_args is None: - return None + if extra_app_args is None: + return None - kargs.update(self.board_arguments[__board_key]) - kargs.update(extra_app_args) + kargs.update(self.board_arguments[__board_key]) + kargs.update(extra_app_args) - return self.builder_class(repo_path, runner=runner, **kargs) + return self.builder_class(repo_path, runner=runner, **kargs) # Builds a list of acceptable application/board combination for every platform @@ -89,12 +89,15 @@ def Create(self, runner, __board_key: Board, __app_key: Application, # Matrix of what can be compiled and what build options are required # by such compilation _MATCHERS[Platform.HOST].AcceptBoard(Board.NATIVE) -_MATCHERS[Platform.HOST].AcceptApplication(Application.ALL_CLUSTERS, app=HostApp.ALL_CLUSTERS) -_MATCHERS[Platform.HOST].AcceptApplication(Application.CHIP_TOOL, app=HostApp.CHIP_TOOL) +_MATCHERS[Platform.HOST].AcceptApplication( + Application.ALL_CLUSTERS, app=HostApp.ALL_CLUSTERS) +_MATCHERS[Platform.HOST].AcceptApplication( + Application.CHIP_TOOL, app=HostApp.CHIP_TOOL) _MATCHERS[Platform.ESP32].AcceptBoard(Board.DEVKITC, board=Esp32Board.DevKitC) _MATCHERS[Platform.ESP32].AcceptBoard(Board.M5STACK, board=Esp32Board.M5Stack) -_MATCHERS[Platform.ESP32].AcceptBoard(Board.C3DEVKIT, board=Esp32Board.C3DevKit) +_MATCHERS[Platform.ESP32].AcceptBoard( + Board.C3DEVKIT, board=Esp32Board.C3DevKit) _MATCHERS[Platform.ESP32].AcceptApplication( Application.ALL_CLUSTERS, app=Esp32App.ALL_CLUSTERS) _MATCHERS[Platform.ESP32].AcceptApplicationForBoard( @@ -107,10 +110,12 @@ def Create(self, runner, __board_key: Board, __app_key: Application, _MATCHERS[Platform.QPG].AcceptApplication(Application.LOCK) _MATCHERS[Platform.QPG].AcceptBoard(Board.QPG6100) -_MATCHERS[Platform.EFR32].AcceptBoard(Board.BRD4161A, board=Efr32Board.BRD4161A) +_MATCHERS[Platform.EFR32].AcceptBoard( + Board.BRD4161A, board=Efr32Board.BRD4161A) _MATCHERS[Platform.EFR32].AcceptApplication( Application.LIGHT, app=Efr32App.LIGHT) -_MATCHERS[Platform.EFR32].AcceptApplication(Application.LOCK, app=Efr32App.LOCK) +_MATCHERS[Platform.EFR32].AcceptApplication( + Application.LOCK, app=Efr32App.LOCK) _MATCHERS[Platform.EFR32].AcceptApplication( Application.WINDOW_COVERING, app=Efr32App.WINDOW_COVERING) @@ -121,64 +126,66 @@ def Create(self, runner, __board_key: Board, __app_key: Application, _MATCHERS[Platform.NRF].AcceptApplication(Application.LIGHT, app=NrfApp.LIGHT) _MATCHERS[Platform.NRF].AcceptApplication(Application.SHELL, app=NrfApp.SHELL) + class BuilderFactory: - """Creates application builders.""" + """Creates application builders.""" - def __init__(self, runner, repository_path: str, output_prefix: str): - self.runner = runner - self.repository_path = repository_path - self.output_prefix = output_prefix + def __init__(self, runner, repository_path: str, output_prefix: str): + self.runner = runner + self.repository_path = repository_path + 
self.output_prefix = output_prefix - def Create(self, platform: Platform, board: Board, app: Application, enable_flashbundle: bool = False): - """Creates a builder object for the specified arguments. """ + def Create(self, platform: Platform, board: Board, app: Application, enable_flashbundle: bool = False): + """Creates a builder object for the specified arguments. """ - builder = _MATCHERS[platform].Create( - self.runner, - board, - app, - self.repository_path, - output_prefix=self.output_prefix) + builder = _MATCHERS[platform].Create( + self.runner, + board, + app, + self.repository_path, + output_prefix=self.output_prefix) - if builder: - builder.SetIdentifier(platform.name.lower(), board.name.lower(), app.name.lower()) - builder.enable_flashbundle(enable_flashbundle) + if builder: + builder.SetIdentifier(platform.name.lower(), + board.name.lower(), app.name.lower()) + builder.enable_flashbundle(enable_flashbundle) - return builder + return builder class TargetRelations: - """Figures out valid combinations of boards/platforms/applications.""" - - @staticmethod - def BoardsForPlatform(platform: Platform) -> Set[Board]: - global _MATCHERS - return set(_MATCHERS[platform].board_arguments.keys()) - - @staticmethod - def PlatformsForBoard(board: Board) -> Set[Platform]: - """Return the platforms that are using the specified board.""" - global _MATCHERS - platforms = set() - for platform, matcher in _MATCHERS.items(): - if board in matcher.board_arguments: - platforms.add(platform) - return platforms - - @staticmethod - def ApplicationsForPlatform(platform: Platform) -> Set[Application]: - """What applications are buildable for a specific platform.""" - global _MATCHERS - return set( - [matcher.app for matcher in _MATCHERS[platform].app_arguments.keys()]) - - @staticmethod - def PlatformsForApplication(application: Application) -> Set[Platform]: - """For what platforms can the given application be compiled.""" - global _MATCHERS - platforms = set() - for platform, matcher in _MATCHERS.items(): - for app_matcher in matcher.app_arguments: - if application == app_matcher.app: - platforms.add(platform) - break - return platforms + """Figures out valid combinations of boards/platforms/applications.""" + + @staticmethod + def BoardsForPlatform(platform: Platform) -> Set[Board]: + global _MATCHERS + return set(_MATCHERS[platform].board_arguments.keys()) + + @staticmethod + def PlatformsForBoard(board: Board) -> Set[Platform]: + """Return the platforms that are using the specified board.""" + global _MATCHERS + platforms = set() + for platform, matcher in _MATCHERS.items(): + if board in matcher.board_arguments: + platforms.add(platform) + return platforms + + @staticmethod + def ApplicationsForPlatform(platform: Platform) -> Set[Application]: + """What applications are buildable for a specific platform.""" + global _MATCHERS + return set( + [matcher.app for matcher in _MATCHERS[platform].app_arguments.keys()]) + + @staticmethod + def PlatformsForApplication(application: Application) -> Set[Platform]: + """For what platforms can the given application be compiled.""" + global _MATCHERS + platforms = set() + for platform, matcher in _MATCHERS.items(): + for app_matcher in matcher.app_arguments: + if application == app_matcher.app: + platforms.add(platform) + break + return platforms diff --git a/scripts/build/build/targets.py b/scripts/build/build/targets.py index 3fc53ce54f01da..99c0380dcec779 100644 --- a/scripts/build/build/targets.py +++ b/scripts/build/build/targets.py @@ -20,74 +20,74 @@ class 
Platform(IntEnum): - """Represents a supported build platform for compilation.""" - HOST = auto() - QPG = auto() - ESP32 = auto() - EFR32 = auto() - NRF = auto() + """Represents a supported build platform for compilation.""" + HOST = auto() + QPG = auto() + ESP32 = auto() + EFR32 = auto() + NRF = auto() - @property - def ArgName(self): - return self.name.lower() + @property + def ArgName(self): + return self.name.lower() - @staticmethod - def FromArgName(name): - for value in Platform: - if name == value.ArgName: - return value - raise KeyError() + @staticmethod + def FromArgName(name): + for value in Platform: + if name == value.ArgName: + return value + raise KeyError() class Board(IntEnum): - """Represents Specific boards within a platform.""" - # Host builds - NATIVE = auto() + """Represents Specific boards within a platform.""" + # Host builds + NATIVE = auto() - # QPG platform - QPG6100 = auto() + # QPG platform + QPG6100 = auto() - # ESP32 platform - M5STACK = auto() - DEVKITC = auto() - C3DEVKIT = auto() + # ESP32 platform + M5STACK = auto() + DEVKITC = auto() + C3DEVKIT = auto() - # EFR32 platform - BRD4161A = auto() + # EFR32 platform + BRD4161A = auto() - # NRF platform - NRF52840 = auto() - NRF5340 = auto() + # NRF platform + NRF52840 = auto() + NRF5340 = auto() - @property - def ArgName(self): - return self.name.lower() + @property + def ArgName(self): + return self.name.lower() - @staticmethod - def FromArgName(name): - for value in Board: - if name == value.ArgName: - return value - raise KeyError() + @staticmethod + def FromArgName(name): + for value in Board: + if name == value.ArgName: + return value + raise KeyError() class Application(IntEnum): - """Example applications that can be built.""" - ALL_CLUSTERS = auto() - LIGHT = auto() - LOCK = auto() - WINDOW_COVERING = auto() - SHELL = auto() - CHIP_TOOL = auto() - BRIDGE = auto() - - @property - def ArgName(self): - return self.name.lower().replace('_', '-') - - @staticmethod - def FromArgName(name): - for value in Application: - if name == value.ArgName: - return value - raise KeyError() + """Example applications that can be built.""" + ALL_CLUSTERS = auto() + LIGHT = auto() + LOCK = auto() + WINDOW_COVERING = auto() + SHELL = auto() + CHIP_TOOL = auto() + BRIDGE = auto() + + @property + def ArgName(self): + return self.name.lower().replace('_', '-') + + @staticmethod + def FromArgName(name): + for value in Application: + if name == value.ArgName: + return value + raise KeyError() diff --git a/scripts/build/build_examples.py b/scripts/build/build_examples.py index 21f02884f7b271..6e245fb00f84c0 100755 --- a/scripts/build/build_examples.py +++ b/scripts/build/build_examples.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
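# Note on the hoisted imports below: 'from runner import ...' and 'import
# build' now execute before the sys.path.append() that used to precede them.
# This still works when the file is run directly, because Python puts the
# script's own directory at the front of sys.path, but it would break if this
# module were imported from another directory.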
+from runner import PrintOnlyRunner, ShellRunner +import build import coloredlogs import click import logging @@ -22,8 +24,6 @@ sys.path.append(os.path.abspath(os.path.dirname(__file__))) -import build -from runner import PrintOnlyRunner, ShellRunner # Supported log levels, mapping string values required for argument # parsing into logging constants @@ -36,19 +36,19 @@ def ValidateRepoPath(context, parameter, value): - """Validates that the given path looks like a valid chip repository checkout.""" - if value.startswith('/TEST/'): - # Hackish command to allow for unit testing + """Validates that the given path looks like a valid chip repository checkout.""" + if value.startswith('/TEST/'): + # Hackish command to allow for unit testing + return value + + for name in ['BUILD.gn', '.gn', os.path.join('scripts', 'bootstrap.sh')]: + expected_file = os.path.join(value, name) + if not os.path.exists(expected_file): + raise click.BadParameter( + "'%s' does not look like a valid repository path: %s not found." % + (value, expected_file)) return value - for name in ['BUILD.gn', '.gn', os.path.join('scripts', 'bootstrap.sh')]: - expected_file = os.path.join(value, name) - if not os.path.exists(expected_file): - raise click.BadParameter( - "'%s' does not look like a valid repository path: %s not found." % - (value, expected_file)) - return value - @click.group(chain=True) @click.option( @@ -111,45 +111,45 @@ def ValidateRepoPath(context, parameter, value): @click.pass_context def main(context, log_level, platform, board, app, repo, out_prefix, clean, dry_run, dry_run_output, enable_flashbundle): - # Ensures somewhat pretty logging of what is going on - coloredlogs.install( - level=__LOG_LEVELS__[log_level], - fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') + # Ensures somewhat pretty logging of what is going on + coloredlogs.install( + level=__LOG_LEVELS__[log_level], + fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') - if not 'PW_PROJECT_ROOT' in os.environ: - raise click.UsageError(""" + if not 'PW_PROJECT_ROOT' in os.environ: + raise click.UsageError(""" PW_PROJECT_ROOT not in current environment. Please make sure you `source scripts/bootstrap.sh` or `source scripts/activate.sh` before running this script. 
""".strip()) - # Support an 'all platforms' choice - if 'all' in platform: - platform = build.PLATFORMS + # Support an 'all platforms' choice + if 'all' in platform: + platform = build.PLATFORMS - if dry_run: - runner = PrintOnlyRunner(dry_run_output) - else: - runner = ShellRunner() + if dry_run: + runner = PrintOnlyRunner(dry_run_output) + else: + runner = ShellRunner() - context.obj = build.Context( - repository_path=repo, output_prefix=out_prefix, runner=runner) - context.obj.SetupBuilders( - platforms=[build.Platform.FromArgName(name) for name in platform], - boards=[build.Board.FromArgName(name) for name in board], - applications=[build.Application.FromArgName(name) for name in app], - enable_flashbundle=enable_flashbundle) + context.obj = build.Context( + repository_path=repo, output_prefix=out_prefix, runner=runner) + context.obj.SetupBuilders( + platforms=[build.Platform.FromArgName(name) for name in platform], + boards=[build.Board.FromArgName(name) for name in board], + applications=[build.Application.FromArgName(name) for name in app], + enable_flashbundle=enable_flashbundle) - if clean: - context.obj.CleanOutputDirectories() + if clean: + context.obj.CleanOutputDirectories() @main.command( 'gen', help='Generate ninja/makefiles (but does not run the compilation)') @click.pass_context def cmd_generate(context): - context.obj.Generate() + context.obj.Generate() @main.command('build', help='generate and run ninja/make as needed to compile') @@ -165,14 +165,14 @@ def cmd_generate(context): help='Prefix of compressed archives of the generated files.') @click.pass_context def cmd_build(context, copy_artifacts_to, create_archives): - context.obj.Build() + context.obj.Build() - if copy_artifacts_to: - context.obj.CopyArtifactsTo(copy_artifacts_to) + if copy_artifacts_to: + context.obj.CopyArtifactsTo(copy_artifacts_to) - if create_archives: - context.obj.CreateArtifactArchives(create_archives) + if create_archives: + context.obj.CreateArtifactArchives(create_archives) if __name__ == '__main__': - main() + main() diff --git a/scripts/build/builders/builder.py b/scripts/build/builders/builder.py index 792b7d7d850d33..74b8cb3c2b9a5c 100644 --- a/scripts/build/builders/builder.py +++ b/scripts/build/builders/builder.py @@ -20,100 +20,102 @@ class Builder(ABC): - """Generic builder base class for CHIP. + """Generic builder base class for CHIP. - Provides ability to boostrap and copy output artifacts and subclasses can use - a generic shell runner. + Provides ability to boostrap and copy output artifacts and subclasses can use + a generic shell runner. - """ - - def __init__(self, root, runner, output_prefix: str = 'out'): - self.root = os.path.abspath(root) - self._runner = runner - self.output_prefix = output_prefix - self._enable_flashbundle = False - - # Set post-init once actual build target is known - self.identifier = None - self.output_dir = None - - def enable_flashbundle(self, enable_flashbundle: bool): - self._enable_flashbundle = enable_flashbundle - - @abstractmethod - def generate(self): - """Generate the build files - generally the ninja/makefiles""" - raise NotImplementedError() - - @abstractmethod - def _build(self): - """Perform an actual build""" - raise NotImplementedError() - - def _generate_flashbundle(self): - """Perform an actual generating of flashbundle - - May do nothing (and builder can choose not to implement this) if the - app does not need special steps for generating flashbundle. (e.g. the - example apps on Linux platform can run the ELF files directly.) 
- """ - pass - - @abstractmethod - def build_outputs(self): - """Return a list of relevant output files after a build. - - May use build output data (e.g. manifests), so this should be invoked - only after a build has succeeded. """ - raise NotImplementedError() - - def flashbundle(self): - """Return the files in flashbundle. - - Return an empty dict (and builder can choose not to implement this) if the - app does not need special files as flashbundle. (e.g. the example apps on - Linux platform can run the ELF files directly.) - - May use data from do_generate_flashbundle, so this should be invoked only - after do_generate_flashbundle has succeeded. - """ - return {} - - def outputs(self): - artifacts = self.build_outputs() - if self._enable_flashbundle: - artifacts.update(self.flashbundle()) - return artifacts - - def build(self): - self._build() - if self._enable_flashbundle: - self._generate_flashbundle() - - def _Execute(self, cmdarray, cwd=None, title=None): - self._runner.Run(cmdarray, cwd=cwd, title=title) - - def CompressArtifacts(self, target_file: str): - with tarfile.open(target_file, "w:gz") as tar: - for target_name, source_name in self.outputs().items(): - logging.info(f'Adding {source_name} into {target_file}/{target_name}') - tar.add(source_name, target_name) - - def CopyArtifacts(self, target_dir: str): - for target_name, source_name in self.outputs().items(): - target_full_name = os.path.join(target_dir, target_name) - - logging.info('Copying %s into %s', source_name, target_name) - - target_dir_full_name = os.path.dirname(target_full_name) - - if not os.path.exists(target_dir_full_name): - logging.info('Creating subdirectory %s first', target_dir_full_name) - os.makedirs(target_dir_full_name) - - shutil.copyfile(source_name, target_full_name) - def SetIdentifier(self, platform: str, board: str, app: str): - self.identifier = '-'.join([platform, board, app]) - self.output_dir = os.path.join(self.output_prefix, self.identifier) + def __init__(self, root, runner, output_prefix: str = 'out'): + self.root = os.path.abspath(root) + self._runner = runner + self.output_prefix = output_prefix + self._enable_flashbundle = False + + # Set post-init once actual build target is known + self.identifier = None + self.output_dir = None + + def enable_flashbundle(self, enable_flashbundle: bool): + self._enable_flashbundle = enable_flashbundle + + @abstractmethod + def generate(self): + """Generate the build files - generally the ninja/makefiles""" + raise NotImplementedError() + + @abstractmethod + def _build(self): + """Perform an actual build""" + raise NotImplementedError() + + def _generate_flashbundle(self): + """Perform an actual generating of flashbundle + + May do nothing (and builder can choose not to implement this) if the + app does not need special steps for generating flashbundle. (e.g. the + example apps on Linux platform can run the ELF files directly.) + """ + pass + + @abstractmethod + def build_outputs(self): + """Return a list of relevant output files after a build. + + May use build output data (e.g. manifests), so this should be invoked + only after a build has succeeded. + """ + raise NotImplementedError() + + def flashbundle(self): + """Return the files in flashbundle. + + Return an empty dict (and builder can choose not to implement this) if the + app does not need special files as flashbundle. (e.g. the example apps on + Linux platform can run the ELF files directly.) 
+ + May use data from do_generate_flashbundle, so this should be invoked only + after do_generate_flashbundle has succeeded. + """ + return {} + + def outputs(self): + artifacts = self.build_outputs() + if self._enable_flashbundle: + artifacts.update(self.flashbundle()) + return artifacts + + def build(self): + self._build() + if self._enable_flashbundle: + self._generate_flashbundle() + + def _Execute(self, cmdarray, cwd=None, title=None): + self._runner.Run(cmdarray, cwd=cwd, title=title) + + def CompressArtifacts(self, target_file: str): + with tarfile.open(target_file, "w:gz") as tar: + for target_name, source_name in self.outputs().items(): + logging.info( + f'Adding {source_name} into {target_file}/{target_name}') + tar.add(source_name, target_name) + + def CopyArtifacts(self, target_dir: str): + for target_name, source_name in self.outputs().items(): + target_full_name = os.path.join(target_dir, target_name) + + logging.info('Copying %s into %s', source_name, target_name) + + target_dir_full_name = os.path.dirname(target_full_name) + + if not os.path.exists(target_dir_full_name): + logging.info('Creating subdirectory %s first', + target_dir_full_name) + os.makedirs(target_dir_full_name) + + shutil.copyfile(source_name, target_full_name) + + def SetIdentifier(self, platform: str, board: str, app: str): + self.identifier = '-'.join([platform, board, app]) + self.output_dir = os.path.join(self.output_prefix, self.identifier) diff --git a/scripts/build/builders/efr32.py b/scripts/build/builders/efr32.py index 44386fa9c60ac3..56d649d1fc934e 100644 --- a/scripts/build/builders/efr32.py +++ b/scripts/build/builders/efr32.py @@ -20,78 +20,80 @@ class Efr32App(Enum): - LIGHT = auto() - LOCK = auto() - WINDOW_COVERING = auto() - - def ExampleName(self): - if self == Efr32App.LIGHT: - return 'lighting-app' - elif self == Efr32App.LOCK: - return 'lock-app' - elif self == Efr32App.WINDOW_COVERING: - return 'window-app' - else: - raise Exception('Unknown app type: %r' % self) - - def AppNamePrefix(self): - if self == Efr32App.LIGHT: - return 'chip-efr32-lighting-example' - elif self == Efr32App.LOCK: - return 'chip-efr32-lock-example' - elif self == Efr32App.WINDOW_COVERING: - return 'chip-efr32-window-example' - else: - raise Exception('Unknown app type: %r' % self) - - def FlashBundleName(self): - if self == Efr32App.LIGHT: - return 'lighting_app.flashbundle.txt' - elif self == Efr32App.LOCK: - return 'lock_app.flashbundle.txt' - elif self == Efr32App.WINDOW_COVERING: - return 'window_app.flashbundle.txt' - else: - raise Exception('Unknown app type: %r' % self) + LIGHT = auto() + LOCK = auto() + WINDOW_COVERING = auto() + + def ExampleName(self): + if self == Efr32App.LIGHT: + return 'lighting-app' + elif self == Efr32App.LOCK: + return 'lock-app' + elif self == Efr32App.WINDOW_COVERING: + return 'window-app' + else: + raise Exception('Unknown app type: %r' % self) + + def AppNamePrefix(self): + if self == Efr32App.LIGHT: + return 'chip-efr32-lighting-example' + elif self == Efr32App.LOCK: + return 'chip-efr32-lock-example' + elif self == Efr32App.WINDOW_COVERING: + return 'chip-efr32-window-example' + else: + raise Exception('Unknown app type: %r' % self) + + def FlashBundleName(self): + if self == Efr32App.LIGHT: + return 'lighting_app.flashbundle.txt' + elif self == Efr32App.LOCK: + return 'lock_app.flashbundle.txt' + elif self == Efr32App.WINDOW_COVERING: + return 'window_app.flashbundle.txt' + else: + raise Exception('Unknown app type: %r' % self) class Efr32Board(Enum): - BRD4161A = 1 + 
BRD4161A = 1 - def GnArgName(self): - if self == Efr32Board.BRD4161A: - return 'BRD4161A' + def GnArgName(self): + if self == Efr32Board.BRD4161A: + return 'BRD4161A' class Efr32Builder(GnBuilder): - def __init__(self, - root, - runner, - output_prefix: str, - app: Efr32App = Efr32App.LIGHT, - board: Efr32Board = Efr32Board.BRD4161A): - super(Efr32Builder, self).__init__( - root=os.path.join(root, 'examples', app.ExampleName(), 'efr32'), - runner=runner, - output_prefix=output_prefix) - - self.app = app - self.gn_build_args = ['efr32_board="%s"' % board.GnArgName()] - - def build_outputs(self): - items = { - '%s.out' % self.app.AppNamePrefix(): - os.path.join(self.output_dir, '%s.out' % self.app.AppNamePrefix()), - '%s.out.map' % self.app.AppNamePrefix(): - os.path.join(self.output_dir, - '%s.out.map' % self.app.AppNamePrefix()), - } - - # Figure out flash bundle files and build accordingly - with open(os.path.join(self.output_dir, self.app.FlashBundleName())) as f: - for line in f.readlines(): - name = line.strip() - items['flashbundle/%s' % name] = os.path.join(self.output_dir, name) - - return items + def __init__(self, + root, + runner, + output_prefix: str, + app: Efr32App = Efr32App.LIGHT, + board: Efr32Board = Efr32Board.BRD4161A): + super(Efr32Builder, self).__init__( + root=os.path.join(root, 'examples', app.ExampleName(), 'efr32'), + runner=runner, + output_prefix=output_prefix) + + self.app = app + self.gn_build_args = ['efr32_board="%s"' % board.GnArgName()] + + def build_outputs(self): + items = { + '%s.out' % self.app.AppNamePrefix(): + os.path.join(self.output_dir, '%s.out' % + self.app.AppNamePrefix()), + '%s.out.map' % self.app.AppNamePrefix(): + os.path.join(self.output_dir, + '%s.out.map' % self.app.AppNamePrefix()), + } + + # Figure out flash bundle files and build accordingly + with open(os.path.join(self.output_dir, self.app.FlashBundleName())) as f: + for line in f.readlines(): + name = line.strip() + items['flashbundle/%s' % + name] = os.path.join(self.output_dir, name) + + return items diff --git a/scripts/build/builders/esp32.py b/scripts/build/builders/esp32.py index f393165517df71..3da7650a930bed 100644 --- a/scripts/build/builders/esp32.py +++ b/scripts/build/builders/esp32.py @@ -22,104 +22,105 @@ class Esp32Board(Enum): - DevKitC = auto() - M5Stack = auto() - C3DevKit = auto() + DevKitC = auto() + M5Stack = auto() + C3DevKit = auto() class Esp32App(Enum): - ALL_CLUSTERS = auto() - LOCK = auto() - SHELL = auto() - BRIDGE = auto() - - @property - def ExampleName(self): - if self == Esp32App.ALL_CLUSTERS: - return 'all-clusters-app' - elif self == Esp32App.LOCK: - return 'lock-app' - elif self == Esp32App.SHELL: - return 'shell' - elif self == Esp32App.BRIDGE: - return 'bridge-app' - else: - raise Exception('Unknown app type: %r' % self) - - @property - def AppNamePrefix(self): - if self == Esp32App.ALL_CLUSTERS: - return 'chip-all-clusters-app' - elif self == Esp32App.LOCK: - return 'chip-lock-app' - elif self == Esp32App.SHELL: - return 'chip-shell' - elif self == Esp32App.BRIDGE: - return 'chip-bridge-app' - else: - raise Exception('Unknown app type: %r' % self) + ALL_CLUSTERS = auto() + LOCK = auto() + SHELL = auto() + BRIDGE = auto() + + @property + def ExampleName(self): + if self == Esp32App.ALL_CLUSTERS: + return 'all-clusters-app' + elif self == Esp32App.LOCK: + return 'lock-app' + elif self == Esp32App.SHELL: + return 'shell' + elif self == Esp32App.BRIDGE: + return 'bridge-app' + else: + raise Exception('Unknown app type: %r' % self) + + @property + 
def AppNamePrefix(self): + if self == Esp32App.ALL_CLUSTERS: + return 'chip-all-clusters-app' + elif self == Esp32App.LOCK: + return 'chip-lock-app' + elif self == Esp32App.SHELL: + return 'chip-shell' + elif self == Esp32App.BRIDGE: + return 'chip-bridge-app' + else: + raise Exception('Unknown app type: %r' % self) def DefaultsFileName(board: Esp32Board, app: Esp32App): - if app != Esp32App.ALL_CLUSTERS: - # only all-clusters has a specific defaults name - return None - - if board == Esp32Board.DevKitC: - return 'sdkconfig.defaults' - elif board == Esp32Board.M5Stack: - return 'sdkconfig_m5stack.defaults' - elif board == Esp32Board.C3DevKit: - return 'sdkconfig_c3devkit.defaults' - else: - raise Exception('Unknown board type') + if app != Esp32App.ALL_CLUSTERS: + # only all-clusters has a specific defaults name + return None + + if board == Esp32Board.DevKitC: + return 'sdkconfig.defaults' + elif board == Esp32Board.M5Stack: + return 'sdkconfig_m5stack.defaults' + elif board == Esp32Board.C3DevKit: + return 'sdkconfig_c3devkit.defaults' + else: + raise Exception('Unknown board type') class Esp32Builder(Builder): - def __init__(self, - root, - runner, - output_prefix: str, - board: Esp32Board = Esp32Board.M5Stack, - app: Esp32App = Esp32App.ALL_CLUSTERS): - super(Esp32Builder, self).__init__(root, runner, output_prefix) - self.board = board - self.app = app - - def _IdfEnvExecute(self, cmd, cwd=None, title=None): - self._Execute( - ['bash', '-c', 'source $IDF_PATH/export.sh; %s' % cmd], - cwd=cwd, - title=title) - - def generate(self): - if os.path.exists(os.path.join(self.output_dir, 'build.ninja')): - return - - defaults = DefaultsFileName(self.board, self.app) - - cmd = 'idf.py' - - if defaults: - cmd += " -D SDKCONFIG_DEFAULTS='%s'" % defaults - - cmd += ' -C examples/%s/esp32 -B %s reconfigure' % (self.app.ExampleName, shlex.quote(self.output_dir)) - - # This will do a 'cmake reconfigure' which will create ninja files without rebuilding - self._IdfEnvExecute( - cmd, cwd=self.root, title='Generating ' + self.identifier) - - def _build(self): - logging.info('Compiling Esp32 at %s', self.output_dir) - - self._IdfEnvExecute( - "ninja -C '%s'" % self.output_dir, title='Building ' + self.identifier) - - def build_outputs(self): - return { - self.app.AppNamePrefix + '.elf': - os.path.join(self.output_dir, self.app.AppNamePrefix + '.elf'), - self.app.AppNamePrefix + '.map': - os.path.join(self.output_dir, self.app.AppNamePrefix + '.map'), - } + def __init__(self, + root, + runner, + output_prefix: str, + board: Esp32Board = Esp32Board.M5Stack, + app: Esp32App = Esp32App.ALL_CLUSTERS): + super(Esp32Builder, self).__init__(root, runner, output_prefix) + self.board = board + self.app = app + + def _IdfEnvExecute(self, cmd, cwd=None, title=None): + self._Execute( + ['bash', '-c', 'source $IDF_PATH/export.sh; %s' % cmd], + cwd=cwd, + title=title) + + def generate(self): + if os.path.exists(os.path.join(self.output_dir, 'build.ninja')): + return + + defaults = DefaultsFileName(self.board, self.app) + + cmd = 'idf.py' + + if defaults: + cmd += " -D SDKCONFIG_DEFAULTS='%s'" % defaults + + cmd += ' -C examples/%s/esp32 -B %s reconfigure' % ( + self.app.ExampleName, shlex.quote(self.output_dir)) + + # This will do a 'cmake reconfigure' which will create ninja files without rebuilding + self._IdfEnvExecute( + cmd, cwd=self.root, title='Generating ' + self.identifier) + + def _build(self): + logging.info('Compiling Esp32 at %s', self.output_dir) + + self._IdfEnvExecute( + "ninja -C '%s'" % 
self.output_dir, title='Building ' + self.identifier) + + def build_outputs(self): + return { + self.app.AppNamePrefix + '.elf': + os.path.join(self.output_dir, self.app.AppNamePrefix + '.elf'), + self.app.AppNamePrefix + '.map': + os.path.join(self.output_dir, self.app.AppNamePrefix + '.map'), + } diff --git a/scripts/build/builders/gn.py b/scripts/build/builders/gn.py index 9bed9ed290baa7..94d436adfcce67 100644 --- a/scripts/build/builders/gn.py +++ b/scripts/build/builders/gn.py @@ -20,32 +20,32 @@ class GnBuilder(Builder): - def __init__(self, root, runner, output_prefix): - """Creates a generic ninja builder. + def __init__(self, root, runner, output_prefix): + """Creates a generic ninja builder. - Args: - root: the root where to run GN into - runner: what to use to execute shell commands - output_prefix: where ninja files are to be generated - """ - super(GnBuilder, self).__init__(root, runner, output_prefix) + Args: + root: the root where to run GN into + runner: what to use to execute shell commands + output_prefix: where ninja files are to be generated + """ + super(GnBuilder, self).__init__(root, runner, output_prefix) - self.gn_build_args = None + self.gn_build_args = None - def generate(self): - if not os.path.exists(self.output_dir): - cmd = [ - 'gn', 'gen', '--check', '--fail-on-unused-args', - '--root=%s' % self.root - ] + def generate(self): + if not os.path.exists(self.output_dir): + cmd = [ + 'gn', 'gen', '--check', '--fail-on-unused-args', + '--root=%s' % self.root + ] - if self.gn_build_args: - cmd += ['--args=%s' % ' '.join(self.gn_build_args)] + if self.gn_build_args: + cmd += ['--args=%s' % ' '.join(self.gn_build_args)] - cmd += [self.output_dir] + cmd += [self.output_dir] - self._Execute(cmd, title='Generating ' + self.identifier) + self._Execute(cmd, title='Generating ' + self.identifier) - def _build(self): - self._Execute(['ninja', '-C', self.output_dir], - title='Building ' + self.identifier) + def _build(self): + self._Execute(['ninja', '-C', self.output_dir], + title='Building ' + self.identifier) diff --git a/scripts/build/builders/host.py b/scripts/build/builders/host.py index b16a1fe67371be..b9628e8257d4b1 100644 --- a/scripts/build/builders/host.py +++ b/scripts/build/builders/host.py @@ -19,46 +19,50 @@ from .gn import GnBuilder + class HostApp(Enum): - ALL_CLUSTERS = auto() - CHIP_TOOL = auto() + ALL_CLUSTERS = auto() + CHIP_TOOL = auto() + + def ExamplePath(self): + if self == HostApp.ALL_CLUSTERS: + return 'all-clusters-app/linux' + elif self == HostApp.CHIP_TOOL: + return 'chip-tool' + else: + raise Exception('Unknown app type: %r' % self) - def ExamplePath(self): - if self == HostApp.ALL_CLUSTERS: - return 'all-clusters-app/linux' - elif self == HostApp.CHIP_TOOL: - return 'chip-tool' - else: - raise Exception('Unknown app type: %r' % self) + def BinaryName(self): + if self == HostApp.ALL_CLUSTERS: + return 'chip-all-clusters-app' + elif self == HostApp.CHIP_TOOL: + return 'chip-tool' + else: + raise Exception('Unknown app type: %r' % self) - def BinaryName(self): - if self == HostApp.ALL_CLUSTERS: - return 'chip-all-clusters-app' - elif self == HostApp.CHIP_TOOL: - return 'chip-tool' - else: - raise Exception('Unknown app type: %r' % self) def ConcretePlatformName(): uname_result = uname() return '-'.join([uname_result.system.lower(), release(), uname_result.machine]) + class HostBuilder(GnBuilder): - def __init__(self, root, runner, output_prefix: str, app: HostApp): - super(HostBuilder, self).__init__( - root=os.path.join(root, 'examples', 
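# (Sketch) With gn_build_args populated by a subclass, GnBuilder.generate()
# above reduces to one shell command of roughly this shape, followed by
# `ninja -C <output_dir>` in _build(); <root> and <output_dir> are
# placeholders:
#
#   gn gen --check --fail-on-unused-args --root=<root> \
#       '--args=efr32_board="BRD4161A"' <output_dir>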
app.ExamplePath()), - runner=runner, - output_prefix=output_prefix) + def __init__(self, root, runner, output_prefix: str, app: HostApp): + super(HostBuilder, self).__init__( + root=os.path.join(root, 'examples', app.ExamplePath()), + runner=runner, + output_prefix=output_prefix) - self.app_name = app.BinaryName() - self.map_name = self.app_name + '.map' + self.app_name = app.BinaryName() + self.map_name = self.app_name + '.map' - def build_outputs(self): - return { - self.app_name: os.path.join(self.output_dir, self.app_name), - self.map_name : os.path.join(self.output_dir, self.map_name) - } + def build_outputs(self): + return { + self.app_name: os.path.join(self.output_dir, self.app_name), + self.map_name: os.path.join(self.output_dir, self.map_name) + } - def SetIdentifier(self, platform: str, board: str, app: str): - super(HostBuilder, self).SetIdentifier(ConcretePlatformName(), board, app) + def SetIdentifier(self, platform: str, board: str, app: str): + super(HostBuilder, self).SetIdentifier( + ConcretePlatformName(), board, app) diff --git a/scripts/build/builders/nrf.py b/scripts/build/builders/nrf.py index 2d198343e692d5..467d22cd7b041b 100644 --- a/scripts/build/builders/nrf.py +++ b/scripts/build/builders/nrf.py @@ -22,124 +22,131 @@ class NrfApp(Enum): - LIGHT = auto() - LOCK = auto() - SHELL=auto() - - def ExampleName(self): - if self == NrfApp.LIGHT: - return 'lighting-app' - elif self == NrfApp.LOCK: - return 'lock-app' - elif self == NrfApp.SHELL: - return 'shell' - else: - raise Exception('Unknown app type: %r' % self) - - def AppNamePrefix(self): - if self == NrfApp.LIGHT: - return 'chip-nrf-lighting-example' - elif self == NrfApp.LOCK: - return 'chip-nrf-lock-example' - elif self == NrfApp.SHELL: - return 'chip-nrf-shell' - else: - raise Exception('Unknown app type: %r' % self) - - def _FlashBundlePrefix(self): - if self == NrfApp.LIGHT: - return 'chip-nrfconnect-lighting-example' - elif self == NrfApp.LOCK: - return 'chip-nrfconnect-lock-example' - elif self == NrfApp.SHELL: - return 'chip-nrfconnect-shell-example' - else: - raise Exception('Unknown app type: %r' % self) - - def FlashBundleName(self): - '''Nrf build script will generate a file naming .flashbundle.txt, go through the output dir to find the file and return it.''' - return self._FlashBundlePrefix() + '.flashbundle.txt' + LIGHT = auto() + LOCK = auto() + SHELL = auto() + + def ExampleName(self): + if self == NrfApp.LIGHT: + return 'lighting-app' + elif self == NrfApp.LOCK: + return 'lock-app' + elif self == NrfApp.SHELL: + return 'shell' + else: + raise Exception('Unknown app type: %r' % self) + + def AppNamePrefix(self): + if self == NrfApp.LIGHT: + return 'chip-nrf-lighting-example' + elif self == NrfApp.LOCK: + return 'chip-nrf-lock-example' + elif self == NrfApp.SHELL: + return 'chip-nrf-shell' + else: + raise Exception('Unknown app type: %r' % self) + + def _FlashBundlePrefix(self): + if self == NrfApp.LIGHT: + return 'chip-nrfconnect-lighting-example' + elif self == NrfApp.LOCK: + return 'chip-nrfconnect-lock-example' + elif self == NrfApp.SHELL: + return 'chip-nrfconnect-shell-example' + else: + raise Exception('Unknown app type: %r' % self) + + def FlashBundleName(self): + '''Nrf build script will generate a file naming .flashbundle.txt, go through the output dir to find the file and return it.''' + return self._FlashBundlePrefix() + '.flashbundle.txt' + class NrfBoard(Enum): - NRF52840 = auto() - NRF5340 = auto() + NRF52840 = auto() + NRF5340 = auto() - def GnArgName(self): - if self == 
NrfBoard.NRF52840: - return 'nrf52840dk_nrf52840' - elif self == NrfBoard.NRF5340: - return 'nrf5340dk_nrf5340_cpuapp' - else: - raise Exception('Unknown board type: %r' % self) + def GnArgName(self): + if self == NrfBoard.NRF52840: + return 'nrf52840dk_nrf52840' + elif self == NrfBoard.NRF5340: + return 'nrf5340dk_nrf5340_cpuapp' + else: + raise Exception('Unknown board type: %r' % self) class NrfConnectBuilder(Builder): - def __init__(self, - root, - runner, - output_prefix: str, - app: NrfApp = NrfApp.LIGHT, - board: NrfBoard = NrfBoard.NRF52840): - super(NrfConnectBuilder, self).__init__(root, runner, output_prefix) - self.app = app - self.board = board - - def generate(self): - if not os.path.exists(self.output_dir): - # NRF does a in-place update of SDK tools - if not self._runner.dry_run: - if 'ZEPHYR_BASE' not in os.environ: - raise Exception("NRF builds require ZEPHYR_BASE to be set") - - zephyr_base = os.environ['ZEPHYR_BASE'] - nrfconnect_sdk = os.path.dirname(zephyr_base) - - # NRF builds will both try to change .west/config in nrfconnect and - # overall perform a git fetch on that location - if not os.access(nrfconnect_sdk, os.W_OK): - raise Exception("Directory %s not writable. NRFConnect builds require updates to this directory." % nrfconnect_sdk) - - # validate the the ZEPHYR_BASE is up to date (generally the case in docker images) - try: - self._Execute(['python3', 'scripts/setup/nrfconnect/update_ncs.py', '--check']) - except Exception as e: - logging.exception('Failed to validate ZEPHYR_BASE status') - logging.error('To update $ZEPHYR_BASE run: python3 scripts/setup/nrfconnect/update_ncs.py --update --shallow') - - raise Exception('ZEPHYR_BASE validation failed') - - cmd = ''' + def __init__(self, + root, + runner, + output_prefix: str, + app: NrfApp = NrfApp.LIGHT, + board: NrfBoard = NrfBoard.NRF52840): + super(NrfConnectBuilder, self).__init__(root, runner, output_prefix) + self.app = app + self.board = board + + def generate(self): + if not os.path.exists(self.output_dir): + # NRF does a in-place update of SDK tools + if not self._runner.dry_run: + if 'ZEPHYR_BASE' not in os.environ: + raise Exception("NRF builds require ZEPHYR_BASE to be set") + + zephyr_base = os.environ['ZEPHYR_BASE'] + nrfconnect_sdk = os.path.dirname(zephyr_base) + + # NRF builds will both try to change .west/config in nrfconnect and + # overall perform a git fetch on that location + if not os.access(nrfconnect_sdk, os.W_OK): + raise Exception( + "Directory %s not writable. NRFConnect builds require updates to this directory." 
% nrfconnect_sdk) + + # validate the the ZEPHYR_BASE is up to date (generally the case in docker images) + try: + self._Execute( + ['python3', 'scripts/setup/nrfconnect/update_ncs.py', '--check']) + except Exception as e: + logging.exception('Failed to validate ZEPHYR_BASE status') + logging.error( + 'To update $ZEPHYR_BASE run: python3 scripts/setup/nrfconnect/update_ncs.py --update --shallow') + + raise Exception('ZEPHYR_BASE validation failed') + + cmd = ''' source "$ZEPHYR_BASE/zephyr-env.sh"; export GNUARMEMB_TOOLCHAIN_PATH="$PW_PIGWEED_CIPD_INSTALL_DIR"; west build --cmake-only -d {outdir} -b {board} {sourcedir} '''.format( - outdir = shlex.quote(self.output_dir), - board = self.board.GnArgName(), - sourcedir=shlex.quote(os.path.join(self.root, 'examples', self.app.ExampleName(), 'nrfconnect')) - ).strip() - - self._Execute(['bash', '-c', cmd], title='Generating ' + self.identifier) - - - def _build(self): - logging.info('Compiling NrfConnect at %s', self.output_dir) - - self._Execute(['ninja', '-C', self.output_dir], title='Building ' + self.identifier) - - def _generate_flashbundle(self): - logging.info(f'Generating flashbundle at {self.output_dir}') - - self._Execute(['ninja', '-C', self.output_dir, 'flashing_script'], title='Generating flashable files of ' + self.identifier) - - def build_outputs(self): - return { - '%s.elf' % self.app.AppNamePrefix(): os.path.join(self.output_dir, 'zephyr', 'zephyr.elf'), - '%s.map' % self.app.AppNamePrefix(): os.path.join(self.output_dir, 'zephyr', 'zephyr.map'), - } - - def flashbundle(self): - with open(os.path.join(self.output_dir, self.app.FlashBundleName()), 'r') as fp: - return { - l.strip(): os.path.join(self.output_dir, l.strip()) for l in fp.readlines() if l.strip() - } + outdir=shlex.quote(self.output_dir), + board=self.board.GnArgName(), + sourcedir=shlex.quote(os.path.join( + self.root, 'examples', self.app.ExampleName(), 'nrfconnect')) + ).strip() + + self._Execute(['bash', '-c', cmd], + title='Generating ' + self.identifier) + + def _build(self): + logging.info('Compiling NrfConnect at %s', self.output_dir) + + self._Execute(['ninja', '-C', self.output_dir], + title='Building ' + self.identifier) + + def _generate_flashbundle(self): + logging.info(f'Generating flashbundle at {self.output_dir}') + + self._Execute(['ninja', '-C', self.output_dir, 'flashing_script'], + title='Generating flashable files of ' + self.identifier) + + def build_outputs(self): + return { + '%s.elf' % self.app.AppNamePrefix(): os.path.join(self.output_dir, 'zephyr', 'zephyr.elf'), + '%s.map' % self.app.AppNamePrefix(): os.path.join(self.output_dir, 'zephyr', 'zephyr.map'), + } + + def flashbundle(self): + with open(os.path.join(self.output_dir, self.app.FlashBundleName()), 'r') as fp: + return { + l.strip(): os.path.join(self.output_dir, l.strip()) for l in fp.readlines() if l.strip() + } diff --git a/scripts/build/builders/qpg.py b/scripts/build/builders/qpg.py index 8f4b8787f868b1..e6af95e1223a64 100644 --- a/scripts/build/builders/qpg.py +++ b/scripts/build/builders/qpg.py @@ -20,16 +20,17 @@ class QpgBuilder(GnBuilder): - def __init__(self, root, runner, output_prefix): - super(QpgBuilder, self).__init__( - root=os.path.join(root, 'examples/lock-app/qpg/'), - runner=runner, - output_prefix=output_prefix) + def __init__(self, root, runner, output_prefix): + super(QpgBuilder, self).__init__( + root=os.path.join(root, 'examples/lock-app/qpg/'), + runner=runner, + output_prefix=output_prefix) - def build_outputs(self): - return { - 
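# (Sketch) flashbundle() above turns the generated manifest into an
# artifact map keyed by bare file name; the same idea in isolation, with a
# made-up output directory:
#
#   with open('out/nrf-demo/flashbundle.txt') as fp:
#       bundle = {line.strip(): os.path.join('out/nrf-demo', line.strip())
#                 for line in fp if line.strip()}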
'chip-qpg-lock-example.out': - os.path.join(self.output_dir, 'chip-qpg6100-lock-example.out'), - 'chip-qpg-lock-example.out.map': - os.path.join(self.output_dir, 'chip-qpg6100-lock-example.out.map'), - } + def build_outputs(self): + return { + 'chip-qpg-lock-example.out': + os.path.join(self.output_dir, 'chip-qpg6100-lock-example.out'), + 'chip-qpg-lock-example.out.map': + os.path.join(self.output_dir, + 'chip-qpg6100-lock-example.out.map'), + } diff --git a/scripts/build/runner/printonly.py b/scripts/build/runner/printonly.py index 92dfa7129f3408..42a8f0e56a7c59 100644 --- a/scripts/build/runner/printonly.py +++ b/scripts/build/runner/printonly.py @@ -16,20 +16,21 @@ class PrintOnlyRunner: - def __init__(self, output_file): - self.output_file = output_file - self.dry_run = True + def __init__(self, output_file): + self.output_file = output_file + self.dry_run = True - def Run(self, cmd, cwd=None, title=None): - if title: - self.output_file.write("# " + title + "\n") + def Run(self, cmd, cwd=None, title=None): + if title: + self.output_file.write("# " + title + "\n") - if cwd: - self.output_file.write('cd "%s"\n' % cwd) + if cwd: + self.output_file.write('cd "%s"\n' % cwd) - self.output_file.write(" ".join([shlex.quote(part) for part in cmd]) + "\n") + self.output_file.write( + " ".join([shlex.quote(part) for part in cmd]) + "\n") - if cwd: - self.output_file.write("cd -\n") + if cwd: + self.output_file.write("cd -\n") - self.output_file.write("\n") + self.output_file.write("\n") diff --git a/scripts/build/runner/shell.py b/scripts/build/runner/shell.py index 5d9df3dfa9fa7f..eeae0f1779fa9e 100644 --- a/scripts/build/runner/shell.py +++ b/scripts/build/runner/shell.py @@ -20,51 +20,51 @@ class LogPipe(threading.Thread): - def __init__(self, level): - """Setup the object with a logger and a loglevel + def __init__(self, level): + """Setup the object with a logger and a loglevel - and start the thread - """ - threading.Thread.__init__(self) - self.daemon = False - self.level = level - self.fd_read, self.fd_write = os.pipe() - self.pipeReader = os.fdopen(self.fd_read) - self.start() + and start the thread + """ + threading.Thread.__init__(self) + self.daemon = False + self.level = level + self.fd_read, self.fd_write = os.pipe() + self.pipeReader = os.fdopen(self.fd_read) + self.start() - def fileno(self): - """Return the write file descriptor of the pipe""" - return self.fd_write + def fileno(self): + """Return the write file descriptor of the pipe""" + return self.fd_write - def run(self): - """Run the thread, logging everything.""" - for line in iter(self.pipeReader.readline, ''): - logging.log(self.level, line.strip('\n')) + def run(self): + """Run the thread, logging everything.""" + for line in iter(self.pipeReader.readline, ''): + logging.log(self.level, line.strip('\n')) - self.pipeReader.close() + self.pipeReader.close() - def close(self): - """Close the write end of the pipe.""" - os.close(self.fd_write) + def close(self): + """Close the write end of the pipe.""" + os.close(self.fd_write) class ShellRunner: - def __init__(self): - self.dry_run = False + def __init__(self): + self.dry_run = False - def Run(self, cmd, cwd=None, title=None): - outpipe = LogPipe(logging.INFO) - errpipe = LogPipe(logging.WARN) + def Run(self, cmd, cwd=None, title=None): + outpipe = LogPipe(logging.INFO) + errpipe = LogPipe(logging.WARN) - if title: - logging.info(title) + if title: + logging.info(title) - with subprocess.Popen(cmd, cwd=cwd, stdout=outpipe, stderr=errpipe) as s: - outpipe.close() - 
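# (Sketch) The LogPipe/Popen pairing above works because LogPipe.fileno()
# hands Popen the write end of a pipe while a daemonless thread drains the
# read end into logging. Reduced to its core, assuming the LogPipe class
# defined above:
#
#   out = LogPipe(logging.INFO)   # thread starts inside __init__
#   err = LogPipe(logging.WARN)
#   with subprocess.Popen(cmd, stdout=out, stderr=err) as proc:
#       out.close()               # parent closes its write ends so the
#       err.close()               # reader threads see EOF when the child exits
#       code = proc.wait()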
errpipe.close() - code = s.wait() - if code != 0: - raise Exception('Command %r failed: %d' % (cmd, code)) - else: - logging.info('Command %r completed', cmd) + with subprocess.Popen(cmd, cwd=cwd, stdout=outpipe, stderr=errpipe) as s: + outpipe.close() + errpipe.close() + code = s.wait() + if code != 0: + raise Exception('Command %r failed: %d' % (cmd, code)) + else: + logging.info('Command %r completed', cmd) diff --git a/scripts/build/test.py b/scripts/build/test.py index 600bef3f839976..bdf94f0207451f 100644 --- a/scripts/build/test.py +++ b/scripts/build/test.py @@ -29,49 +29,49 @@ SCRIPT_ROOT = os.path.dirname(__file__) + def build_expected_output(root: str, out: str) -> List[str]: - with open(os.path.join(SCRIPT_ROOT, 'expected_all_platform_commands.txt'), 'rt') as f: - for l in f.readlines(): - yield l.replace("{root}", root).replace("{out}", out).replace('{real_platform}', ConcretePlatformName()) + with open(os.path.join(SCRIPT_ROOT, 'expected_all_platform_commands.txt'), 'rt') as f: + for l in f.readlines(): + yield l.replace("{root}", root).replace("{out}", out).replace('{real_platform}', ConcretePlatformName()) def build_actual_output(root: str, out: str) -> List[str]: - # Fake out that we have a project root - os.environ['PW_PROJECT_ROOT'] = root - - binary = os.path.join(SCRIPT_ROOT, 'build_examples.py') + # Fake out that we have a project root + os.environ['PW_PROJECT_ROOT'] = root - retval = subprocess.run([ - binary, - '--platform', 'all', - '--log-level', 'FATAL', - '--dry-run', - '--repo', root, - '--out-prefix', out, - 'build' - ], stdout=subprocess.PIPE, check=True, encoding='UTF-8') + binary = os.path.join(SCRIPT_ROOT, 'build_examples.py') + retval = subprocess.run([ + binary, + '--platform', 'all', + '--log-level', 'FATAL', + '--dry-run', + '--repo', root, + '--out-prefix', out, + 'build' + ], stdout=subprocess.PIPE, check=True, encoding='UTF-8') - return [l + '\n' for l in retval.stdout.split('\n')] + return [l + '\n' for l in retval.stdout.split('\n')] def main(): - coloredlogs.install(level=logging.INFO, fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') - - ROOT = '/TEST/BUILD/ROOT' - OUT = '/OUTPUT/DIR' + coloredlogs.install(level=logging.INFO, + fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') - expected = [l for l in build_expected_output(ROOT, OUT)] - actual = [l for l in build_actual_output(ROOT, OUT)] + ROOT = '/TEST/BUILD/ROOT' + OUT = '/OUTPUT/DIR' - diffs = [line for line in difflib.unified_diff(expected, actual)] + expected = [l for l in build_expected_output(ROOT, OUT)] + actual = [l for l in build_actual_output(ROOT, OUT)] - if diffs: - logging.error("DIFFERENCE between expected and generated output") - for l in diffs: - logging.warning(" " + l.strip()) - sys.exit(1) + diffs = [line for line in difflib.unified_diff(expected, actual)] + if diffs: + logging.error("DIFFERENCE between expected and generated output") + for l in diffs: + logging.warning(" " + l.strip()) + sys.exit(1) if __name__ == "__main__": diff --git a/scripts/examples/build-all-clusters-app.py b/scripts/examples/build-all-clusters-app.py index 8238bcadafa132..4bee3cbfa0b9ed 100755 --- a/scripts/examples/build-all-clusters-app.py +++ b/scripts/examples/build-all-clusters-app.py @@ -12,85 +12,90 @@ ROOT = 'examples/all-clusters-app/esp32' idf_path = os.environ['IDF_PATH'] -class IDFExecutor: - """Runs specified commands via an executor that activates the CHIP build environment.""" - - def __init__(self): - script_path = os.path.dirname(os.path.realpath(__file__)) - 
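# (Sketch) test.py above is a golden-file test: render the expected command
# list from expected_all_platform_commands.txt, run build_examples.py with
# --dry-run, and fail on any difference. The core assertion, given two line
# lists `expected` and `actual`:
#
#   diffs = list(difflib.unified_diff(expected, actual))
#   if diffs:
#       for line in diffs:
#           logging.warning(' ' + line.strip())
#       sys.exit(1)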
self.chip_root = os.path.realpath(os.path.join(script_path, '..', '..')) +class IDFExecutor: + """Runs specified commands via an executor that activates the CHIP build environment.""" - logging.info("CHIP Root directory: %s" % self.chip_root) + def __init__(self): + script_path = os.path.dirname(os.path.realpath(__file__)) + self.chip_root = os.path.realpath( + os.path.join(script_path, '..', '..')) - self.run_cmd = os.path.join(self.chip_root, "scripts", "run_in_build_env.sh") - logging.info("Executing via: %s" % self.run_cmd) + logging.info("CHIP Root directory: %s" % self.chip_root) + self.run_cmd = os.path.join( + self.chip_root, "scripts", "run_in_build_env.sh") + logging.info("Executing via: %s" % self.run_cmd) - def execute(self, command): - os.chdir(self.chip_root) - subprocess.call([self.run_cmd, 'source "%s/export.sh"; cd %s; idf.py %s' % (idf_path, ROOT, command)]) + def execute(self, command): + os.chdir(self.chip_root) + subprocess.call( + [self.run_cmd, 'source "%s/export.sh"; cd %s; idf.py %s' % (idf_path, ROOT, command)]) def main(): - """Main task if executed standalone.""" - parser = argparse.ArgumentParser(description='Build all-clusters-app example') - parser.add_argument( - '--log-level', - default=logging.INFO, - type=lambda x: getattr(logging, x), - help='Configure the logging level.') - parser.add_argument( - '--clear-config', - default=None, - choices=['m5stack', 'devkit', 'curr', 'default'], - ) - parser.add_argument( - '--generate-flash-script', - action='store_true', - ) - - args = parser.parse_args() - - # Ensures somewhat pretty logging of what is going on - logging.basicConfig( - level=args.log_level, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') - coloredlogs.install() - - e = IDFExecutor() - - if args.clear_config: - old_default_sdkconfig = None - clear_curr = args.clear_config != 'curr' - logging.info('Building a clear configuration') - - if args.clear_config == "m5stack" or args.clear_config == "devkit": - desired_sdkconfig_name = 'sdkconfig_%s.defaults' % args.clear_config - logging.info('Using default' + desired_sdkconfig_name) - desired_sdkconfig = os.path.join(ROOT, desired_sdkconfig_name) - default_sdkconfig = os.path.join(ROOT, 'sdkconfig.defaults') - old_default_sdkconfig = os.path.join(ROOT, 'sdkconfig.defaults.old') - shutil.copy(default_sdkconfig, old_default_sdkconfig) - shutil.copy(desired_sdkconfig, default_sdkconfig) - - if clear_curr: - logging.info('Clearing current config') - sdkconfig = os.path.join(ROOT, 'sdkconfig') - os.remove(sdkconfig) - - e.execute('menuconfig') - - if old_default_sdkconfig is not None: - shutil.move(old_default_sdkconfig, default_sdkconfig) - - logging.info('Compiling') - - e.execute('build') - - logging.info('Generating flash script') - if args.generate_flash_script: - e.execute('flashing_script') + """Main task if executed standalone.""" + parser = argparse.ArgumentParser( + description='Build all-clusters-app example') + parser.add_argument( + '--log-level', + default=logging.INFO, + type=lambda x: getattr(logging, x), + help='Configure the logging level.') + parser.add_argument( + '--clear-config', + default=None, + choices=['m5stack', 'devkit', 'curr', 'default'], + ) + parser.add_argument( + '--generate-flash-script', + action='store_true', + ) + + args = parser.parse_args() + + # Ensures somewhat pretty logging of what is going on + logging.basicConfig( + level=args.log_level, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') + coloredlogs.install() + + e = IDFExecutor() + 
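# (Sketch) IDFExecutor.execute() funnels every idf.py invocation through a
# single bash call so the ESP-IDF environment is re-sourced each time,
# wrapped in CHIP's own build environment via run_in_build_env.sh:
#
#   subprocess.call([run_cmd,
#                    'source "%s/export.sh"; cd %s; idf.py %s'
#                    % (idf_path, ROOT, command)])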
+ if args.clear_config: + old_default_sdkconfig = None + clear_curr = args.clear_config != 'curr' + logging.info('Building a clear configuration') + + if args.clear_config == "m5stack" or args.clear_config == "devkit": + desired_sdkconfig_name = 'sdkconfig_%s.defaults' % args.clear_config + logging.info('Using default' + desired_sdkconfig_name) + desired_sdkconfig = os.path.join(ROOT, desired_sdkconfig_name) + default_sdkconfig = os.path.join(ROOT, 'sdkconfig.defaults') + old_default_sdkconfig = os.path.join( + ROOT, 'sdkconfig.defaults.old') + shutil.copy(default_sdkconfig, old_default_sdkconfig) + shutil.copy(desired_sdkconfig, default_sdkconfig) + + if clear_curr: + logging.info('Clearing current config') + sdkconfig = os.path.join(ROOT, 'sdkconfig') + os.remove(sdkconfig) + + e.execute('menuconfig') + + if old_default_sdkconfig is not None: + shutil.move(old_default_sdkconfig, default_sdkconfig) + + logging.info('Compiling') + + e.execute('build') + + logging.info('Generating flash script') + if args.generate_flash_script: + e.execute('flashing_script') + if __name__ == '__main__': - # execute only if run as a script - main() + # execute only if run as a script + main() diff --git a/scripts/fixit_rotation.py b/scripts/fixit_rotation.py index 5d4aab7e284afa..ed47ce3479693e 100755 --- a/scripts/fixit_rotation.py +++ b/scripts/fixit_rotation.py @@ -2,40 +2,40 @@ import random ROTATION_CHOICES = [ - 'Andrei (Google)', - 'Boris (Apple)', - 'Cecille (Google)', - 'Damian (Nordic)', - 'Etienne (Silabs)', - 'Junior (Silabs)', - 'Kamil (Nordic)', - 'Kevin (Google)', - # 'Martin (Google)', # TO be enabled July 2021 - 'Michael (Google)', - 'Mingjie (Google)', - 'Pankaj (Apple)', - 'Ricardo (Silabs)', - 'Rob (Google)', - 'Song (Google)', - 'Timothy (Qorvo)', - 'Victor (Samsung)', - 'Vivien (Apple)', - 'Yufeng (Google)', - 'Yunhan (Google)', + 'Andrei (Google)', + 'Boris (Apple)', + 'Cecille (Google)', + 'Damian (Nordic)', + 'Etienne (Silabs)', + 'Junior (Silabs)', + 'Kamil (Nordic)', + 'Kevin (Google)', + # 'Martin (Google)', # TO be enabled July 2021 + 'Michael (Google)', + 'Mingjie (Google)', + 'Pankaj (Apple)', + 'Ricardo (Silabs)', + 'Rob (Google)', + 'Song (Google)', + 'Timothy (Qorvo)', + 'Victor (Samsung)', + 'Vivien (Apple)', + 'Yufeng (Google)', + 'Yunhan (Google)', ] def main(): - """Main task if executed standalone.""" - print("Rolling dice....") + """Main task if executed standalone.""" + print("Rolling dice....") - results = ROTATION_CHOICES[:] - random.shuffle(results) + results = ROTATION_CHOICES[:] + random.shuffle(results) - print("Results: ") - for idx, name in enumerate(results): - print(" %2d: %s" % (idx +1, name)) + print("Results: ") + for idx, name in enumerate(results): + print(" %2d: %s" % (idx + 1, name)) if __name__ == "__main__": - main() + main() diff --git a/scripts/flashing/efr32_firmware_utils.py b/scripts/flashing/efr32_firmware_utils.py index f0fcd896a851c4..a6238874df6c51 100755 --- a/scripts/flashing/efr32_firmware_utils.py +++ b/scripts/flashing/efr32_firmware_utils.py @@ -130,9 +130,9 @@ def flash(self, image): def reset(self): """Reset the device.""" return self.run_tool( - 'commander', - ['device', 'reset', self.DEVICE_ARGUMENTS], - name='Reset') + 'commander', + ['device', 'reset', self.DEVICE_ARGUMENTS], + name='Reset') def actions(self): """Perform actions on the device according to self.option.""" diff --git a/scripts/flashing/esp32_firmware_utils.py b/scripts/flashing/esp32_firmware_utils.py index ed5d2b8c93131b..bbbabb79d1d2bd 100755 --- 
a/scripts/flashing/esp32_firmware_utils.py +++ b/scripts/flashing/esp32_firmware_utils.py @@ -446,13 +446,16 @@ def actions(self): return self -### Mobly integration +# Mobly integration + + class ESP32Platform: - def __init__(self, flasher_args): - self.flasher = Flasher(**flasher_args) + def __init__(self, flasher_args): + self.flasher = Flasher(**flasher_args) + + def flash(self): + self.flasher.flash_command([os.getcwd()]) - def flash(self): - self.flasher.flash_command([os.getcwd()]) def verify_platform_args(platform_args): required_args = [ @@ -475,11 +478,13 @@ def verify_platform_args(platform_args): if difference: raise ValueError("Required arguments missing: %s" % difference) + def create_platform(platform_args): verify_platform_args(platform_args[0]) return ESP32Platform(platform_args[0]) -### End of Mobly integration +# End of Mobly integration + if __name__ == '__main__': sys.exit(Flasher().flash_command(sys.argv)) diff --git a/scripts/flashing/firmware_utils.py b/scripts/flashing/firmware_utils.py index 31992c72da1a2e..fcc312b07a4467 100644 --- a/scripts/flashing/firmware_utils.py +++ b/scripts/flashing/firmware_utils.py @@ -448,8 +448,8 @@ def make_wrapper(self, argv): with open(args.output, 'w') as script_file: script_file.write(script) os.chmod(args.output, (stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR - | stat.S_IXGRP | stat.S_IRGRP - | stat.S_IXOTH | stat.S_IROTH)) + | stat.S_IXGRP | stat.S_IRGRP + | stat.S_IXOTH | stat.S_IROTH)) except OSError as exception: print(exception, sys.stderr) return 1 diff --git a/scripts/flashing/nrfconnect_firmware_utils.py b/scripts/flashing/nrfconnect_firmware_utils.py index 26763a913344c9..69e60a12ccc3a8 100755 --- a/scripts/flashing/nrfconnect_firmware_utils.py +++ b/scripts/flashing/nrfconnect_firmware_utils.py @@ -158,13 +158,16 @@ def actions(self): return self -### Mobly integration +# Mobly integration + + class Nrf5Platform: - def __init__(self, flasher_args): - self.flasher = Flasher(**flasher_args) + def __init__(self, flasher_args): + self.flasher = Flasher(**flasher_args) + + def flash(self): + self.flasher.flash_command([os.getcwd()]) - def flash(self): - self.flasher.flash_command([os.getcwd()]) def verify_platform_args(platform_args): required_args = ['application'] @@ -172,11 +175,13 @@ def verify_platform_args(platform_args): if not r in platform_args: raise ValueError("Required argument %s missing" % r) + def create_platform(platform_args): verify_platform_args(platform_args[0]) return Nrf5Platform(platform_args[0]) -### End of Mobly integration +# End of Mobly integration + if __name__ == '__main__': sys.exit(Flasher().flash_command(sys.argv)) diff --git a/scripts/flashing/qpg_firmware_utils.py b/scripts/flashing/qpg_firmware_utils.py index 7f0fd697e7e31f..c5df781c89c894 100644 --- a/scripts/flashing/qpg_firmware_utils.py +++ b/scripts/flashing/qpg_firmware_utils.py @@ -72,7 +72,8 @@ def verify(self, image): def flash(self, image): """Flash image.""" - self.log(1, "Copying to {} drive {}".format(image, self.option.drive or "None")) + self.log(1, "Copying to {} drive {}".format( + image, self.option.drive or "None")) if not self.option.drive: self.log(0, "--drive or -d required for copy action") self.err = 1 @@ -80,18 +81,21 @@ def flash(self, image): # Check for drive mount if not os.path.exists(self.option.drive): - self.log(0, "Drive '{}' does not exist. Is the USB device mounted correctly ?".format(self.option.drive)) + self.log(0, "Drive '{}' does not exist. 
Is the USB device mounted correctly ?".format( + self.option.drive)) self.err = 2 return self # Check for valid mBed device mbed_marker = os.path.join(self.option.drive, 'MBED.HTM') if not os.path.exists(mbed_marker): - self.log(0, "Drive '{}' not a path to an MBED device".format(self.option.drive)) + self.log(0, "Drive '{}' not a path to an MBED device".format( + self.option.drive)) self.err = 3 return self - shutil.copyfile(image, os.path.join(self.option.drive, os.path.basename(image))) + shutil.copyfile(image, os.path.join( + self.option.drive, os.path.basename(image))) return self def reset(self): diff --git a/scripts/gen_chip_version.py b/scripts/gen_chip_version.py index 36348f6be4e9f3..bae79f01b4afee 100755 --- a/scripts/gen_chip_version.py +++ b/scripts/gen_chip_version.py @@ -202,29 +202,30 @@ def main(argv): - parser = optparse.OptionParser() + parser = optparse.OptionParser() - parser.add_option('--output_file') - parser.add_option('--chip_major', type=int, default=0) - parser.add_option('--chip_minor', type=int, default=0) - parser.add_option('--chip_patch', type=int, default=0) - parser.add_option('--chip_extra', type=str, default='') + parser.add_option('--output_file') + parser.add_option('--chip_major', type=int, default=0) + parser.add_option('--chip_minor', type=int, default=0) + parser.add_option('--chip_patch', type=int, default=0) + parser.add_option('--chip_extra', type=str, default='') - options, _ = parser.parse_args(argv) + options, _ = parser.parse_args(argv) - template_args = { - 'chip_major': options.chip_major, - 'chip_minor': options.chip_minor, - 'chip_patch': options.chip_patch, - 'chip_extra': options.chip_extra, - } + template_args = { + 'chip_major': options.chip_major, + 'chip_minor': options.chip_minor, + 'chip_patch': options.chip_patch, + 'chip_extra': options.chip_extra, + } - template_args['chip_version'] = '%d.%d.%d%s' % (options.chip_major, options.chip_minor, options.chip_patch, options.chip_extra) + template_args['chip_version'] = '%d.%d.%d%s' % ( + options.chip_major, options.chip_minor, options.chip_patch, options.chip_extra) - with open(options.output_file, 'w') as chip_version_file: - chip_version_file.write(TEMPLATE % template_args) + with open(options.output_file, 'w') as chip_version_file: + chip_version_file.write(TEMPLATE % template_args) - return 0 + return 0 if __name__ == '__main__': diff --git a/scripts/gen_test_driver.py b/scripts/gen_test_driver.py index 61dd9cf3b656f6..d38868b6505faa 100644 --- a/scripts/gen_test_driver.py +++ b/scripts/gen_test_driver.py @@ -43,7 +43,7 @@ ''' -### Forward declarations will be added here +# Forward declarations will be added here TEMPLATE_MAIN_START = ''' int main() @@ -54,55 +54,54 @@ ''' -### Test invokation will be added here +# Test invokation will be added here TEMPLATE_SUFFIX = ''' return code; }''' + def main(argv): - parser = optparse.OptionParser() + parser = optparse.OptionParser() + + parser.add_option('--input_file') + parser.add_option('--output_file') - parser.add_option('--input_file') - parser.add_option('--output_file') + options, _ = parser.parse_args(argv) - options, _ = parser.parse_args(argv) - - tests = [] + tests = [] - TEST_SUITE_RE = re.compile(r'\s*CHIP_REGISTER_TEST_SUITE\(([^)]*)\)') + TEST_SUITE_RE = re.compile(r'\s*CHIP_REGISTER_TEST_SUITE\(([^)]*)\)') - with open(options.input_file, 'r') as input_file: - for l in input_file.readlines(): - match = TEST_SUITE_RE.match(l) - if not match: - continue - - tests.append(match.group(1)) + with open(options.input_file, 'r') 
as input_file: + for l in input_file.readlines(): + match = TEST_SUITE_RE.match(l) + if not match: + continue - if not tests: - print("ERROR: no tests found in '%s'" % input_file); - print("Did you forget to CHIP_REGISTER_TEST_SUITE?"); - return 1 + tests.append(match.group(1)) - with open(options.output_file, 'w') as output_file: - output_file.write(TEMPLATE_PREFIX) + if not tests: + print("ERROR: no tests found in '%s'" % input_file) + print("Did you forget to CHIP_REGISTER_TEST_SUITE?") + return 1 - for test in tests: - output_file.write("int %s();\n" % test) - output_file.write("\n"); + with open(options.output_file, 'w') as output_file: + output_file.write(TEMPLATE_PREFIX) - output_file.write(TEMPLATE_MAIN_START) + for test in tests: + output_file.write("int %s();\n" % test) + output_file.write("\n") - for test in tests: - output_file.write(" code = code | (%s());\n" % test) - output_file.write("\n"); + output_file.write(TEMPLATE_MAIN_START) + for test in tests: + output_file.write(" code = code | (%s());\n" % test) + output_file.write("\n") - output_file.write(TEMPLATE_SUFFIX) - + output_file.write(TEMPLATE_SUFFIX) - return 0 + return 0 if __name__ == '__main__': diff --git a/scripts/helpers/bloat_check.py b/scripts/helpers/bloat_check.py index 2c715e3da8b65d..52b7d0f46047c5 100755 --- a/scripts/helpers/bloat_check.py +++ b/scripts/helpers/bloat_check.py @@ -43,20 +43,20 @@ class SectionChange: - """Describes delta changes to a specific section""" + """Describes delta changes to a specific section""" - def __init__(self, section, fileChange, vmChange): - self.section = section - self.fileChange = fileChange - self.vmChange = vmChange + def __init__(self, section, fileChange, vmChange): + self.section = section + self.fileChange = fileChange + self.vmChange = vmChange class ComparisonResult: - """Comparison results for an entire file""" + """Comparison results for an entire file""" - def __init__(self, name): - self.fileName = name - self.sectionChanges = [] + def __init__(self, name): + self.fileName = name + self.sectionChanges = [] SECTIONS_TO_WATCH = set( @@ -64,119 +64,124 @@ def __init__(self, name): def filesInDirectory(dirName): - """Get all the file names in the specified directory.""" - for name in os.listdir(dirName): - mode = os.stat(os.path.join(dirName, name)).st_mode - if stat.S_ISREG(mode): - yield name + """Get all the file names in the specified directory.""" + for name in os.listdir(dirName): + mode = os.stat(os.path.join(dirName, name)).st_mode + if stat.S_ISREG(mode): + yield name def writeFileBloatReport(f, baselineName, buildName): - """Generate a bloat report diffing a baseline file with a build output file.""" - logging.info('Running bloaty diff between %s and %s', baselineName, buildName) - f.write('Comparing %s and %s:\n\n' % (baselineName, buildName)) + """Generate a bloat report diffing a baseline file with a build output file.""" + logging.info('Running bloaty diff between %s and %s', + baselineName, buildName) + f.write('Comparing %s and %s:\n\n' % (baselineName, buildName)) - result = subprocess.run( - ['bloaty', '--csv', buildName, '--', baselineName], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) + result = subprocess.run( + ['bloaty', '--csv', buildName, '--', baselineName], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) - if result.returncode != 0: - logging.warning('Bloaty execution failed: %d', result.returncode) - f.write('BLOAT EXECUTION FAILED WITH CODE %d:\n' % result.returncode) + if result.returncode != 0: + 
logging.warning('Bloaty execution failed: %d', result.returncode)
-    f.write('BLOAT EXECUTION FAILED WITH CODE %d:\n' % result.returncode)
+    if result.returncode != 0:
+        logging.warning('Bloaty execution failed: %d', result.returncode)
+        f.write('BLOAT EXECUTION FAILED WITH CODE %d:\n' % result.returncode)

-  content = result.stdout.decode('utf8')
+    content = result.stdout.decode('utf8')

-  f.write(content)
-  f.write('\n')
+    f.write(content)
+    f.write('\n')

-  result = ComparisonResult(os.path.basename(buildName))
-  try:
-    reader = csv.reader(io.StringIO(content))
+    result = ComparisonResult(os.path.basename(buildName))
+    try:
+        reader = csv.reader(io.StringIO(content))

-    for row in reader:
-      section, vm, f = row
-      if (section in SECTIONS_TO_WATCH) or (vm not in ['0', 'vmsize']):
-        result.sectionChanges.append(SectionChange(section, int(f), int(vm)))
-  except Exception:
-    pass
+        for row in reader:
+            section, vm, f = row
+            if (section in SECTIONS_TO_WATCH) or (vm not in ['0', 'vmsize']):
+                result.sectionChanges.append(
+                    SectionChange(section, int(f), int(vm)))
+    except Exception:
+        pass

-  return result
+    return result


 def generateBloatReport(outputFileName,
                         baselineDir,
                         buildOutputDir,
                         title='BLOAT REPORT'):
-  """Generates a bloat report fo files betwen two diferent directories."""
-  logging.info('Generating bloat diff report between %s and %s', baselineDir,
-               buildOutputDir)
-  with open(outputFileName, 'wt') as f:
-    f.write(title + '\n\n')
-
-    baselineNames = set([name for name in filesInDirectory(baselineDir)])
-    outputNames = set([name for name in filesInDirectory(buildOutputDir)])
-
-    baselineOnly = baselineNames - outputNames
-    if baselineOnly:
-      logging.warning('Some files only exist in the baseline: %r', baselineOnly)
-      f.write('Files found only in the baseline:\n ')
-      f.write('\n %s'.join(baselineOnly))
-      f.write('\n\n')
-
-    outputOnly = outputNames - baselineNames
-    if outputOnly:
-      logging.warning('Some files only exist in the build output: %r',
-                      outputOnly)
-      f.write('Files found only in the build output:\n ')
-      f.write('\n %s'.join(outputOnly))
-      f.write('\n\n')
-
-    results = []
-    for name in (baselineNames & outputNames):
-      results.append(
-          writeFileBloatReport(f, os.path.join(baselineDir, name),
-                               os.path.join(buildOutputDir, name)))
-    return results
+    """Generates a bloat report for files between two different directories."""
+    logging.info('Generating bloat diff report between %s and %s', baselineDir,
+                 buildOutputDir)
+    with open(outputFileName, 'wt') as f:
+        f.write(title + '\n\n')
+
+        baselineNames = set([name for name in filesInDirectory(baselineDir)])
+        outputNames = set([name for name in filesInDirectory(buildOutputDir)])
+
+        baselineOnly = baselineNames - outputNames
+        if baselineOnly:
+            logging.warning(
+                'Some files only exist in the baseline: %r', baselineOnly)
+            f.write('Files found only in the baseline:\n ')
+            f.write('\n %s'.join(baselineOnly))
+            f.write('\n\n')
+
+        outputOnly = outputNames - baselineNames
+        if outputOnly:
+            logging.warning('Some files only exist in the build output: %r',
+                            outputOnly)
+            f.write('Files found only in the build output:\n ')
+            f.write('\n %s'.join(outputOnly))
+            f.write('\n\n')
+
+        results = []
+        for name in (baselineNames & outputNames):
+            results.append(
+                writeFileBloatReport(f, os.path.join(baselineDir, name),
+                                     os.path.join(buildOutputDir, name)))
+        return results


 def sendFileAsPrComment(job_name, filename, gh_token, gh_repo, gh_pr_number,
                         compare_results, base_sha):
-  """Generates a PR comment containing the specified file content."""
+
logging.info('Uploading report to "%s", PR %d', gh_repo, gh_pr_number) - rawText = open(filename, 'rt').read() + rawText = open(filename, 'rt').read() - # a consistent title to help identify obsolete comments - titleHeading = 'Size increase report for "{jobName}"'.format(jobName=job_name) + # a consistent title to help identify obsolete comments + titleHeading = 'Size increase report for "{jobName}"'.format( + jobName=job_name) - api = github.Github(gh_token) - repo = api.get_repo(gh_repo) - pull = repo.get_pull(gh_pr_number) + api = github.Github(gh_token) + repo = api.get_repo(gh_repo) + pull = repo.get_pull(gh_pr_number) - for comment in pull.get_issue_comments(): - if not comment.body.startswith(titleHeading): - continue - logging.info('Removing obsolete comment with heading "%s"', (titleHeading)) + for comment in pull.get_issue_comments(): + if not comment.body.startswith(titleHeading): + continue + logging.info( + 'Removing obsolete comment with heading "%s"', (titleHeading)) - comment.delete() + comment.delete() - if all(len(file.sectionChanges) == 0 for file in compare_results): - logging.info('No results to report') - return + if all(len(file.sectionChanges) == 0 for file in compare_results): + logging.info('No results to report') + return - compareTable = 'File | Section | File | VM\n---- | ---- | ----- | ---- \n' - for file in compare_results: - for change in file.sectionChanges: - compareTable += '{0} | {1} | {2} | {3}\n'.format(file.fileName, - change.section, - change.fileChange, - change.vmChange) + compareTable = 'File | Section | File | VM\n---- | ---- | ----- | ---- \n' + for file in compare_results: + for change in file.sectionChanges: + compareTable += '{0} | {1} | {2} | {3}\n'.format(file.fileName, + change.section, + change.fileChange, + change.vmChange) - # NOTE: PRs are issues with attached patches, hence the API naming - pull.create_issue_comment("""{title} from {baseSha} + # NOTE: PRs are issues with attached patches, hence the API naming + pull.create_issue_comment("""{title} from {baseSha} {table} @@ -192,158 +197,166 @@ def sendFileAsPrComment(job_name, filename, gh_token, gh_repo, gh_pr_number, def getPullRequestBaseSha(githubToken, githubRepo, pullRequestNumber): - """Figure out the SHA for the base of a pull request""" - api = github.Github(githubToken) - repo = api.get_repo(githubRepo) - pull = repo.get_pull(pullRequestNumber) + """Figure out the SHA for the base of a pull request""" + api = github.Github(githubToken) + repo = api.get_repo(githubRepo) + pull = repo.get_pull(pullRequestNumber) + + return pull.base.sha - return pull.base.sha def cleanDir(name): - """Ensures a clean directory with the given name exists. Only handles files""" - if os.path.exists(name): - for fname in os.listdir(name): - path = os.path.join(name, fname) - if os.path.isfile(path): - os.unlink(path) - else: - os.mkdir(name) + """Ensures a clean directory with the given name exists. 
Only handles files""" + if os.path.exists(name): + for fname in os.listdir(name): + path = os.path.join(name, fname) + if os.path.isfile(path): + os.unlink(path) + else: + os.mkdir(name) def downloadArtifact(artifact, dirName): - """Extract an artifact into a directory.""" - zipFile = zipfile.ZipFile(io.BytesIO(artifact.downloadBlob()), 'r') - logging.info('Extracting zip file to %r' % dirName) - zipFile.extractall(dirName) - -def main(): - """Main task if executed standalone.""" - parser = argparse.ArgumentParser(description='Fetch master build artifacts.') - parser.add_argument( - '--output-dir', - type=str, - default='.', - help='Where to download the artifacts') - parser.add_argument( - '--github-api-token', - type=str, - help='Github API token to upload the report as a comment') - parser.add_argument( - '--github-repository', type=str, help='Repository to use for PR comments') - parser.add_argument( - '--log-level', - default=logging.INFO, - type=lambda x: getattr(logging, x), - help='Configure the logging level.') - args = parser.parse_args() - - # Ensures somewhat pretty logging of what is going on - logging.basicConfig( - level=args.log_level, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') - coloredlogs.install() - - if not args.github_api_token: - logging.error('Required arguments missing: github api token is required.') - return - - # all known artifacts - artifacts = [a for a in github_fetch_artifacts.getAllArtifacts(args.github_api_token, args.github_repository)] - - # process newest artifacts first - artifacts.sort(key = lambda x: x.created_at, reverse=True) - - current_time = datetime.datetime.now() - seen_names = set() - pull_artifact_re = re.compile('^(.*)-pull-(\\d+)$') - binary_count = 0 - for a in artifacts: - # logs cleanup after 3 days - is_log = a.name.endswith('-logs') - - if not is_log: - binary_count = binary_count + 1 - - need_delete = False - if (current_time - a.created_at).days > BINARY_KEEP_DAYS: - # Do not keep binary builds forever - need_delete = True - elif not is_log and binary_count > BINARY_MAX_COUNT: - # Keep a maximum number of binary packages - need_delete = True - elif is_log and (current_time - a.created_at).days > LOG_KEEP_DAYS: - # Logs are kept even shorter - need_delete = True - - if need_delete: - logging.info('Old artifact: %s from %r' % (a.name, a.created_at)) - a.delete() - continue - - if a.name.endswith('-logs'): - # logs names are duplicate, however that is fine - continue - - - if a.name in seen_names: - logging.info('Artifact name already seen before: %s' % a.name) - a.delete() - continue - - seen_names.add(a.name) - - m = pull_artifact_re.match(a.name) - if not m: - logging.info('Non-PR artifact found: %r from %r' % (a.name, a.created_at)) - continue - - prefix = m.group(1) - pull_number = int(m.group(2)) - - logging.info('Processing PR %s via artifact %r' % (pull_number, a.name)) - - try: - base_sha = getPullRequestBaseSha(args.github_api_token, args.github_repository, pull_number) + """Extract an artifact into a directory.""" + zipFile = zipfile.ZipFile(io.BytesIO(artifact.downloadBlob()), 'r') + logging.info('Extracting zip file to %r' % dirName) + zipFile.extractall(dirName) - base_artifact_name = '%s-%s' % (prefix, base_sha) - base_artifacts = [v for v in artifacts if v.name == base_artifact_name] - if len(base_artifacts) != 1: - raise Exception('Did not find exactly one artifact for %s: %r' % (base_artifact_name, [v.name for v in base_artifacts])) - - b = base_artifacts[0] - - logging.info('Diff will be against 
artifact %r' % b.name) - - aOutput = os.path.join(args.output_dir, 'pull_artifact') - bOutput = os.path.join(args.output_dir, 'master_artifact') - - cleanDir(aOutput) - cleanDir(bOutput) - - downloadArtifact(a, aOutput) - downloadArtifact(b, bOutput) +def main(): + """Main task if executed standalone.""" + parser = argparse.ArgumentParser( + description='Fetch master build artifacts.') + parser.add_argument( + '--output-dir', + type=str, + default='.', + help='Where to download the artifacts') + parser.add_argument( + '--github-api-token', + type=str, + help='Github API token to upload the report as a comment') + parser.add_argument( + '--github-repository', type=str, help='Repository to use for PR comments') + parser.add_argument( + '--log-level', + default=logging.INFO, + type=lambda x: getattr(logging, x), + help='Configure the logging level.') + args = parser.parse_args() + + # Ensures somewhat pretty logging of what is going on + logging.basicConfig( + level=args.log_level, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') + coloredlogs.install() + + if not args.github_api_token: + logging.error( + 'Required arguments missing: github api token is required.') + return + + # all known artifacts + artifacts = [a for a in github_fetch_artifacts.getAllArtifacts( + args.github_api_token, args.github_repository)] + + # process newest artifacts first + artifacts.sort(key=lambda x: x.created_at, reverse=True) + + current_time = datetime.datetime.now() + seen_names = set() + pull_artifact_re = re.compile('^(.*)-pull-(\\d+)$') + binary_count = 0 + for a in artifacts: + # logs cleanup after 3 days + is_log = a.name.endswith('-logs') + + if not is_log: + binary_count = binary_count + 1 + + need_delete = False + if (current_time - a.created_at).days > BINARY_KEEP_DAYS: + # Do not keep binary builds forever + need_delete = True + elif not is_log and binary_count > BINARY_MAX_COUNT: + # Keep a maximum number of binary packages + need_delete = True + elif is_log and (current_time - a.created_at).days > LOG_KEEP_DAYS: + # Logs are kept even shorter + need_delete = True + + if need_delete: + logging.info('Old artifact: %s from %r' % (a.name, a.created_at)) + a.delete() + continue + + if a.name.endswith('-logs'): + # logs names are duplicate, however that is fine + continue + + if a.name in seen_names: + logging.info('Artifact name already seen before: %s' % a.name) + a.delete() + continue + + seen_names.add(a.name) + + m = pull_artifact_re.match(a.name) + if not m: + logging.info('Non-PR artifact found: %r from %r' % + (a.name, a.created_at)) + continue + + prefix = m.group(1) + pull_number = int(m.group(2)) + + logging.info('Processing PR %s via artifact %r' % + (pull_number, a.name)) + + try: + base_sha = getPullRequestBaseSha( + args.github_api_token, args.github_repository, pull_number) + + base_artifact_name = '%s-%s' % (prefix, base_sha) + + base_artifacts = [ + v for v in artifacts if v.name == base_artifact_name] + if len(base_artifacts) != 1: + raise Exception('Did not find exactly one artifact for %s: %r' % ( + base_artifact_name, [v.name for v in base_artifacts])) + + b = base_artifacts[0] + + logging.info('Diff will be against artifact %r' % b.name) + + aOutput = os.path.join(args.output_dir, 'pull_artifact') + bOutput = os.path.join(args.output_dir, 'master_artifact') + + cleanDir(aOutput) + cleanDir(bOutput) - report_name = os.path.join(aOutput, 'report.csv') + downloadArtifact(a, aOutput) + downloadArtifact(b, bOutput) - results = generateBloatReport(report_name, bOutput, 
aOutput) + report_name = os.path.join(aOutput, 'report.csv') - sendFileAsPrComment(prefix, report_name, args.github_api_token, - args.github_repository, pull_number, results, base_sha) + results = generateBloatReport(report_name, bOutput, aOutput) - # If running over a top level directory, ensure git sees no output - cleanDir(aOutput) - cleanDir(bOutput) + sendFileAsPrComment(prefix, report_name, args.github_api_token, + args.github_repository, pull_number, results, base_sha) - # Output processed. - a.delete() + # If running over a top level directory, ensure git sees no output + cleanDir(aOutput) + cleanDir(bOutput) - except Exception as e: - tb = traceback.format_exc() - logging.warning('Failed to process bloat report: %s', tb) + # Output processed. + a.delete() + except Exception as e: + tb = traceback.format_exc() + logging.warning('Failed to process bloat report: %s', tb) if __name__ == '__main__': - # execute only if run as a script - main() + # execute only if run as a script + main() diff --git a/scripts/helpers/clang-tidy-launcher.py b/scripts/helpers/clang-tidy-launcher.py index 08e756ba59bddb..4e276f4d6bdae1 100755 --- a/scripts/helpers/clang-tidy-launcher.py +++ b/scripts/helpers/clang-tidy-launcher.py @@ -3,13 +3,14 @@ import subprocess import sys + def main(): if len(sys.argv) < 2: - return 1; + return 1 cc = sys.argv[1] if not cc.startswith("clang"): - return 0; + return 0 command = ["clang-tidy"] clang_args = [] @@ -41,5 +42,6 @@ def main(): clang_result = subprocess.run(sys.argv[1:]) return clang_result.returncode + if __name__ == "__main__": sys.exit(main()) diff --git a/scripts/helpers/github_fetch_artifacts.py b/scripts/helpers/github_fetch_artifacts.py index b146faef0b1b5e..101a404ccff8d7 100644 --- a/scripts/helpers/github_fetch_artifacts.py +++ b/scripts/helpers/github_fetch_artifacts.py @@ -26,68 +26,74 @@ class ArtifactInfo(github.GithubObject.NonCompletableGithubObject): - def _initAttributes(self): - pass - - def _useAttributes(self, attr): - if 'id' in attr: - self.id = self._makeIntAttribute(attr['id']).value - if 'node_id' in attr: - self.node_id = self._makeStringAttribute(attr['node_id']).value - if 'name' in attr: - self.name = self._makeStringAttribute(attr['name']).value - if 'size_in_bytes' in attr: - self.size_in_bytes = self._makeIntAttribute(attr['size_in_bytes']).value - if 'url' in attr: - self.url = self._makeStringAttribute(attr['url']).value - if 'archive_download_url' in attr: - self.archive_download_url = self._makeStringAttribute(attr['archive_download_url']).value - if 'expired' in attr: - self.expired = self._makeBoolAttribute(attr['expired']).value - if 'created_at' in attr: - self.created_at = self._makeDatetimeAttribute(attr['created_at']).value - if 'updated_at' in attr: - self.expires_at = self._makeDatetimeAttribute(attr['updated_at']).value - - def downloadBlob(self): - url = self.archive_download_url - logging.info('Fetching: %r' % url) - - status, headers, _ = self._requester.requestBlob('GET', url) - - if status != 302: - raise Exception('Expected a redirect during blob download but got status %d, headers %r.' 
% (status, headers)) - - response = requests.get(headers['location']) - response.raise_for_status() - - return response.content - - def delete(self): - """Delete this artifact.""" - logging.warning('DELETING artifact ' + self.url) - self._requester.requestJsonAndCheck('DELETE', self.url) + def _initAttributes(self): + pass + + def _useAttributes(self, attr): + if 'id' in attr: + self.id = self._makeIntAttribute(attr['id']).value + if 'node_id' in attr: + self.node_id = self._makeStringAttribute(attr['node_id']).value + if 'name' in attr: + self.name = self._makeStringAttribute(attr['name']).value + if 'size_in_bytes' in attr: + self.size_in_bytes = self._makeIntAttribute( + attr['size_in_bytes']).value + if 'url' in attr: + self.url = self._makeStringAttribute(attr['url']).value + if 'archive_download_url' in attr: + self.archive_download_url = self._makeStringAttribute( + attr['archive_download_url']).value + if 'expired' in attr: + self.expired = self._makeBoolAttribute(attr['expired']).value + if 'created_at' in attr: + self.created_at = self._makeDatetimeAttribute( + attr['created_at']).value + if 'updated_at' in attr: + self.expires_at = self._makeDatetimeAttribute( + attr['updated_at']).value + + def downloadBlob(self): + url = self.archive_download_url + logging.info('Fetching: %r' % url) + + status, headers, _ = self._requester.requestBlob('GET', url) + + if status != 302: + raise Exception( + 'Expected a redirect during blob download but got status %d, headers %r.' % (status, headers)) + + response = requests.get(headers['location']) + response.raise_for_status() + + return response.content + + def delete(self): + """Delete this artifact.""" + logging.warning('DELETING artifact ' + self.url) + self._requester.requestJsonAndCheck('DELETE', self.url) + class ArtifactFetcher(github.GithubObject.NonCompletableGithubObject): - def __init__(self, repo): - self.url = repo.url + '/actions/artifacts' - self._requester = repo._requester + def __init__(self, repo): + self.url = repo.url + '/actions/artifacts' + self._requester = repo._requester - def get_artifacts(self): - return github.PaginatedList.PaginatedList( - ArtifactInfo, - self._requester, - self.url, - None, - headers={'Accept': 'application/vnd.github.v3+json'}, - list_item='artifacts', - ) + def get_artifacts(self): + return github.PaginatedList.PaginatedList( + ArtifactInfo, + self._requester, + self.url, + None, + headers={'Accept': 'application/vnd.github.v3+json'}, + list_item='artifacts', + ) def getAllArtifacts(githubToken, githubRepo): - """Get all artifacts visible in the given repo.""" - api = github.Github(githubToken) - repo = api.get_repo(githubRepo) + """Get all artifacts visible in the given repo.""" + api = github.Github(githubToken) + repo = api.get_repo(githubRepo) - return ArtifactFetcher(repo).get_artifacts() + return ArtifactFetcher(repo).get_artifacts() diff --git a/scripts/helpers/restyle-diff.sh b/scripts/helpers/restyle-diff.sh index 4701b0a7e7710a..e292b63410f69e 100755 --- a/scripts/helpers/restyle-diff.sh +++ b/scripts/helpers/restyle-diff.sh @@ -39,6 +39,7 @@ restyle-paths() { } cd "$CHIP_ROOT" -declare -a paths="($(git diff --ignore-submodules --name-only "${1:-master}"))" +declare -a paths="($(git ls-tree -r master --name-only | grep -e '\.py$'))" +# declare -a paths="($(git diff --ignore-submodules --name-only "${1:-master}"))" restyle-paths "${paths[@]}" diff --git a/scripts/helpers/upload_release_asset.py b/scripts/helpers/upload_release_asset.py index 2188e905f29762..2430356425e80b 100755 --- 
a/scripts/helpers/upload_release_asset.py +++ b/scripts/helpers/upload_release_asset.py @@ -26,94 +26,95 @@ class BundleBuilder: - def __init__(self, outputName, outputPrefix, workingDirectory): - self.outputName = outputName + '.tar.xz' - self.outputPrefix = outputPrefix - self.workingDirectory = workingDirectory + def __init__(self, outputName, outputPrefix, workingDirectory): + self.outputName = outputName + '.tar.xz' + self.outputPrefix = outputPrefix + self.workingDirectory = workingDirectory - logging.info('Creating bundle "%s":', self.outputName) + logging.info('Creating bundle "%s":', self.outputName) - self.output = tarfile.open(self.outputName, 'w:xz') + self.output = tarfile.open(self.outputName, 'w:xz') - def appendFile(self, name): - """Appends the specified file in the working directory to the bundle.""" - logging.info(' Appending %s to the bundle', name) + def appendFile(self, name): + """Appends the specified file in the working directory to the bundle.""" + logging.info(' Appending %s to the bundle', name) - current_directory = os.path.realpath(os.curdir) - try: - os.chdir(self.workingDirectory) - self.output.add(name, os.path.join(self.outputPrefix, name)) - finally: - os.chdir(current_directory) + current_directory = os.path.realpath(os.curdir) + try: + os.chdir(self.workingDirectory) + self.output.add(name, os.path.join(self.outputPrefix, name)) + finally: + os.chdir(current_directory) - def close(self): - """Closes the bundle and returns the file name of the bundle.""" - logging.info(' Bundle creation complete.') - self.output.close() - return self.outputName + def close(self): + """Closes the bundle and returns the file name of the bundle.""" + logging.info(' Bundle creation complete.') + self.output.close() + return self.outputName def main(): - """Main task if executed standalone.""" - parser = argparse.ArgumentParser( - description='Uploads an asset bundle file to a github release .') - parser.add_argument( - '--github-api-token', - type=str, - help='Github API token to upload the report as a comment') - parser.add_argument( - '--github-repository', type=str, help='Repository to use for PR comments') - parser.add_argument( - '--release-tag', type=str, help='Release tag to upload asset to') - parser.add_argument( - '--bundle-files', - type=str, - help='A file containing what assets to include') - parser.add_argument( - '--working-directory', - type=str, - help='What directory to use as the current directory for uploading') - parser.add_argument( - '--bundle-name', type=str, help='Prefix to use in the archive file') - parser.add_argument( - '--log-level', - default=logging.INFO, - type=lambda x: getattr(logging, x), - help='Configure the logging level.') - args = parser.parse_args() - - # Ensures somewhat pretty logging of what is going on - logging.basicConfig( - level=args.log_level, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') - coloredlogs.install() - - if not args.github_api_token: - logging.error('Required arguments missing: github api token is required') - return - - bundle = BundleBuilder(args.bundle_name, args.bundle_name, - args.working_directory) - - with open(args.bundle_files, 'rt') as bundleInputs: - for fileName in bundleInputs.readlines(): - bundle.appendFile(fileName.strip()) - - assetPath = bundle.close() - - api = github.Github(args.github_api_token) - repo = api.get_repo(args.github_repository) - - logging.info('Connected to github repository') - - release = repo.get_release(args.release_tag) - logging.info('Release "%s" 
found.' % args.release_tag) - - logging.info('Uploading %s', assetPath) - release.upload_asset(assetPath) - logging.info('Asset upload complete') + """Main task if executed standalone.""" + parser = argparse.ArgumentParser( + description='Uploads an asset bundle file to a github release .') + parser.add_argument( + '--github-api-token', + type=str, + help='Github API token to upload the report as a comment') + parser.add_argument( + '--github-repository', type=str, help='Repository to use for PR comments') + parser.add_argument( + '--release-tag', type=str, help='Release tag to upload asset to') + parser.add_argument( + '--bundle-files', + type=str, + help='A file containing what assets to include') + parser.add_argument( + '--working-directory', + type=str, + help='What directory to use as the current directory for uploading') + parser.add_argument( + '--bundle-name', type=str, help='Prefix to use in the archive file') + parser.add_argument( + '--log-level', + default=logging.INFO, + type=lambda x: getattr(logging, x), + help='Configure the logging level.') + args = parser.parse_args() + + # Ensures somewhat pretty logging of what is going on + logging.basicConfig( + level=args.log_level, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') + coloredlogs.install() + + if not args.github_api_token: + logging.error( + 'Required arguments missing: github api token is required') + return + + bundle = BundleBuilder(args.bundle_name, args.bundle_name, + args.working_directory) + + with open(args.bundle_files, 'rt') as bundleInputs: + for fileName in bundleInputs.readlines(): + bundle.appendFile(fileName.strip()) + + assetPath = bundle.close() + + api = github.Github(args.github_api_token) + repo = api.get_repo(args.github_repository) + + logging.info('Connected to github repository') + + release = repo.get_release(args.release_tag) + logging.info('Release "%s" found.' % args.release_tag) + + logging.info('Uploading %s', assetPath) + release.upload_asset(assetPath) + logging.info('Asset upload complete') if __name__ == '__main__': - # execute only if run as a script - main() + # execute only if run as a script + main() diff --git a/scripts/setup/nrfconnect/update_ncs.py b/scripts/setup/nrfconnect/update_ncs.py index f537e3c9228424..64f09e928202f9 100755 --- a/scripts/setup/nrfconnect/update_ncs.py +++ b/scripts/setup/nrfconnect/update_ncs.py @@ -21,11 +21,13 @@ import subprocess import sys + def get_repository_commit_sha(repository_location): command = ['git', '-C', repository_location, 'rev-parse', 'HEAD'] process = subprocess.run(command, check=True, stdout=subprocess.PIPE) return process.stdout.decode('ascii').strip() + def update_ncs(repository_location, revision, fetch_shallow): # Fetch sdk-nrf to the desired revision. command = ['git', '-C', repository_location, 'fetch'] @@ -37,20 +39,25 @@ def update_ncs(repository_location, revision, fetch_shallow): # Call west update command to update all projects and submodules used by sdk-nrf. 
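
For context, the west invocation assembled just below reduces to the following standalone sketch; the run_west_update name is illustrative and not part of the patch, while the flags mirror the ones visible in the hunk:

import subprocess

def run_west_update(fetch_shallow):
    # Base invocation: update every project/submodule listed in the west manifest.
    command = ['west', 'update']
    if fetch_shallow:
        # Shallow variant: only fetch the commits actually needed, one level deep.
        command += ['--fetch', 'smart', '--narrow', '-o=--depth=1']
    subprocess.run(command, check=True)
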
command = ['west', 'update'] - command += ['--fetch', 'smart', '--narrow', '-o=--depth=1'] if fetch_shallow else [] + command += ['--fetch', 'smart', '--narrow', + '-o=--depth=1'] if fetch_shallow else [] subprocess.run(command, check=True) + def get_ncs_recommended_revision(): - chip_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.normpath('../../..'))) + chip_root = os.path.abspath(os.path.join( + os.path.dirname(__file__), os.path.normpath('../../..'))) # Read recommended revision saved in the .nrfconnect-recommended-revision file. try: with open(os.path.join(chip_root, 'config/nrfconnect/.nrfconnect-recommended-revision'), 'r') as f: return f.readline().strip() except: - raise RuntimeError("Encountered problem when trying to read .nrfconnect-recommended-revision file.") + raise RuntimeError( + "Encountered problem when trying to read .nrfconnect-recommended-revision file.") + -def print_messages(messages : list, yellow_text : bool): +def print_messages(messages: list, yellow_text: bool): # Add colour formatting if yellow text was set if yellow_text: messages = [f"\33[33m{message}\x1b[0m" for message in messages] @@ -58,6 +65,7 @@ def print_messages(messages : list, yellow_text : bool): for message in messages: print(message) + def print_check_revision_warning_message(current_revision, recommended_revision): current_revision_message = f"WARNING: Your current NCS revision ({current_revision})" recommended_revision_message = f"differs from the recommended ({recommended_revision})." @@ -66,7 +74,8 @@ def print_check_revision_warning_message(current_revision, recommended_revision) call_command_message = os.path.abspath(__file__) + " --update" # Get the longest message length, to fit the warning frame size. - longest_message_len = max([len(current_revision_message), len(recommended_revision_message), len(allowed_message), len(update_message), len(call_command_message)]) + longest_message_len = max([len(current_revision_message), len(recommended_revision_message), len( + allowed_message), len(update_message), len(call_command_message)]) # To keep the right frame shape, space characters are added to messages shorter than the longest one.
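
The padding trick in the fmt line that follows is worth unpacking; here is a minimal self-contained rendering of the same frame (the sample messages are made up):

messages = ['WARNING: Your current NCS revision (abc123)',
            'differs from the recommended (def456).']
width = max(len(message) for message in messages)
fmt = '# {:<%s}#' % width              # left-justify and pad to the longest message
print((width + 3) * '#')               # top border: '# ' + padding + '#' is width + 3 wide
for message in messages:
    print(fmt.format(message))
print((width + 3) * '#')               # bottom border
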
fmt = "# {:<%s}#" % (longest_message_len) @@ -74,18 +83,25 @@ def print_check_revision_warning_message(current_revision, recommended_revision) print_messages([(longest_message_len+3)*'#', fmt.format(current_revision_message), fmt.format(recommended_revision_message), fmt.format(''), fmt.format(allowed_message), fmt.format(update_message), fmt.format(call_command_message), (longest_message_len+3)*'#'], sys.stdout.isatty()) + def main(): try: zephyr_base = os.getenv("ZEPHYR_BASE") if not zephyr_base: - raise RuntimeError("No ZEPHYR_BASE environment variable found, please set ZEPHYR_BASE to a zephyr repository path.") - - parser = argparse.ArgumentParser(description='Script helping to update nRF Connect SDK to currently recommended revision.') - parser.add_argument("-c", "--check", help="Check if your current nRF Connect SDK revision is the same as recommended one.", action="store_true") - parser.add_argument("-u", "--update", help="Update your nRF Connect SDK to currently recommended revision.", action="store_true") - parser.add_argument("-s", "--shallow", help="Fetch only specific commits (without the history) when updating nRF Connect SDK.", action="store_true") - parser.add_argument("-q", "--quiet", help="Don't print any message if the check succeeds.", action="store_true") + raise RuntimeError( + "No ZEPHYR_BASE environment variable found, please set ZEPHYR_BASE to a zephyr repository path.") + + parser = argparse.ArgumentParser( + description='Script helping to update nRF Connect SDK to currently recommended revision.') + parser.add_argument( + "-c", "--check", help="Check if your current nRF Connect SDK revision is the same as recommended one.", action="store_true") + parser.add_argument( + "-u", "--update", help="Update your nRF Connect SDK to currently recommended revision.", action="store_true") + parser.add_argument( + "-s", "--shallow", help="Fetch only specific commits (without the history) when updating nRF Connect SDK.", action="store_true") + parser.add_argument( + "-q", "--quiet", help="Don't print any message if the check succeeds.", action="store_true") args = parser.parse_args() ncs_base = os.path.join(zephyr_base, '../nrf') @@ -98,7 +114,8 @@ def main(): current_revision = get_repository_commit_sha(ncs_base) if current_revision != recommended_revision: - print_check_revision_warning_message(current_revision, recommended_revision) + print_check_revision_warning_message( + current_revision, recommended_revision) sys.exit(1) if not args.quiet: @@ -112,5 +129,6 @@ def main(): print(e) sys.exit(1) + if __name__ == '__main__': main() diff --git a/scripts/tools/esp32_log_cat.py b/scripts/tools/esp32_log_cat.py index f6addf0d638632..c8bafd1652e4f5 100755 --- a/scripts/tools/esp32_log_cat.py +++ b/scripts/tools/esp32_log_cat.py @@ -51,7 +51,6 @@ def ExtractSeverity(self, log_line: str): if log_line[0] in 'EWIV': self.severity = log_line[0] - def Log(self, raw): """Converts raw bytes from serial output into python logging. 
@@ -79,43 +78,44 @@ def Log(self, raw): def main(): - """Main task if executed standalone.""" - parser = argparse.ArgumentParser(description='Output nicely colored logs from esp32') + """Main task if executed standalone.""" + parser = argparse.ArgumentParser( + description='Output nicely colored logs from esp32') - parser.add_argument( - '--device', - default='/dev/ttyUSB0', - type=str, - help='What serial device to open.') + parser.add_argument( + '--device', + default='/dev/ttyUSB0', + type=str, + help='What serial device to open.') - parser.add_argument( - '--baudrate', - default=115200, - type=int, - help='Baudrate for the serial device.') + parser.add_argument( + '--baudrate', + default=115200, + type=int, + help='Baudrate for the serial device.') - parser.add_argument( - '--log-level', - default=logging.DEBUG, - type=lambda x: getattr(logging, x), - help='Log filtering to apply.') + parser.add_argument( + '--log-level', + default=logging.DEBUG, + type=lambda x: getattr(logging, x), + help='Log filtering to apply.') - args = parser.parse_args() + args = parser.parse_args() - # Ensures somewhat pretty logging of what is going on - logging.basicConfig(level=args.log_level); - coloredlogs.install(fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') + # Ensures somewhat pretty logging of what is going on + logging.basicConfig(level=args.log_level) + coloredlogs.install(fmt='%(asctime)s %(name)s %(levelname)-7s %(message)s') - logger = logging.getLogger(args.device) - logger.setLevel(args.log_level) + logger = logging.getLogger(args.device) + logger.setLevel(args.log_level) - printer = LogPrinter(logger) - ser = serial.Serial(args.device, args.baudrate) - while True: - data = ser.readline() - printer.Log(data) + printer = LogPrinter(logger) + ser = serial.Serial(args.device, args.baudrate) + while True: + data = ser.readline() + printer.Log(data) if __name__ == '__main__': - # execute only if run as a script - main() + # execute only if run as a script + main() diff --git a/scripts/tools/memory/gaps.py b/scripts/tools/memory/gaps.py index fac3a8cb75a3e4..125bc9ce61f815 100755 --- a/scripts/tools/memory/gaps.py +++ b/scripts/tools/memory/gaps.py @@ -88,7 +88,8 @@ def main(argv): else: section = e['elffile'].get_section_by_name(i.section) data = section.data() - limit = memdf.select.get_limit(config, 'section', i.section) + limit = memdf.select.get_limit( + config, 'section', i.section) e['section'][i.section] = section e['data'][i.section] = data e['limit'][i.section] = limit diff --git a/scripts/tools/memory/memdf/df.py b/scripts/tools/memory/memdf/df.py index 45606ea78e4b46..a91e44c669d94b 100644 --- a/scripts/tools/memory/memdf/df.py +++ b/scripts/tools/memory/memdf/df.py @@ -24,6 +24,7 @@ class DF(pd.DataFrame): # pylint: disable=too-many-ancestors """DataFrame builder with default columns and types.""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) for c in self.required: diff --git a/scripts/tools/zap/convert.py b/scripts/tools/zap/convert.py index c3b149163589cf..ed616d03f55d1e 100755 --- a/scripts/tools/zap/convert.py +++ b/scripts/tools/zap/convert.py @@ -23,35 +23,44 @@ import sys import urllib.request -CHIP_ROOT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), '../../..')) +CHIP_ROOT_DIR = os.path.realpath( + os.path.join(os.path.dirname(__file__), '../../..')) + def checkPythonVersion(): if sys.version_info[0] < 3: - print('Must use Python 3. Current version is ' + str(sys.version_info[0])) + print('Must use Python 3. 
Current version is ' + + str(sys.version_info[0])) exit(1) + def checkFileExists(path): if not os.path.isfile(path): print('Error: ' + path + ' does not exist or is not a file.') exit(1) + def checkDirExists(path): if not os.path.isdir(path): print('Error: ' + path + ' does not exist or is not a directory.') exit(1) + def getFilePath(name): fullpath = os.path.join(CHIP_ROOT_DIR, name) checkFileExists(fullpath) return fullpath + def getDirPath(name): fullpath = os.path.join(CHIP_ROOT_DIR, name) checkDirExists(fullpath) return fullpath + def runArgumentsParser(): - parser = argparse.ArgumentParser(description='Convert .zap files to the current zap version') + parser = argparse.ArgumentParser( + description='Convert .zap files to the current zap version') parser.add_argument('zap', help='Path to the application .zap file') args = parser.parse_args() @@ -59,13 +68,16 @@ def runArgumentsParser(): return zap_file + def runConversion(zap_file): templates_file = getFilePath('src/app/zap-templates/app-templates.json') zcl_file = getFilePath('src/app/zap-templates/zcl/zcl.json') generator_dir = getDirPath('third_party/zap/repo') os.chdir(generator_dir) - subprocess.check_call(['node', './src-script/zap-convert.js', '-z', zcl_file, '-g', templates_file, '-o', zap_file, zap_file]) + subprocess.check_call(['node', './src-script/zap-convert.js', + '-z', zcl_file, '-g', templates_file, '-o', zap_file, zap_file]) + def main(): checkPythonVersion() @@ -74,5 +86,6 @@ def main(): zap_file = runArgumentsParser() runConversion(zap_file) + if __name__ == '__main__': main() diff --git a/scripts/tools/zap/generate.py b/scripts/tools/zap/generate.py index cf27363b7771c2..cdf4c48f01fcc9 100755 --- a/scripts/tools/zap/generate.py +++ b/scripts/tools/zap/generate.py @@ -23,42 +23,53 @@ import sys import urllib.request -CHIP_ROOT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), '../../..')) +CHIP_ROOT_DIR = os.path.realpath( + os.path.join(os.path.dirname(__file__), '../../..')) + def checkPythonVersion(): if sys.version_info[0] < 3: - print('Must use Python 3. Current version is ' + str(sys.version_info[0])) + print('Must use Python 3.
Current version is ' + + str(sys.version_info[0])) exit(1) + def checkFileExists(path): if not os.path.isfile(path): print('Error: ' + path + ' does not exist or is not a file.') exit(1) + def checkDirExists(path): if not os.path.isdir(path): print('Error: ' + path + ' does not exist or is not a directory.') exit(1) + def getFilePath(name): fullpath = os.path.join(CHIP_ROOT_DIR, name) checkFileExists(fullpath) return fullpath + def getDirPath(name): fullpath = os.path.join(CHIP_ROOT_DIR, name) checkDirExists(fullpath) return fullpath + def runArgumentsParser(): default_templates = 'src/app/zap-templates/app-templates.json' default_zcl = 'src/app/zap-templates/zcl/zcl.json' default_output_dir = 'gen/' - parser = argparse.ArgumentParser(description='Generate artifacts from .zapt templates') + parser = argparse.ArgumentParser( + description='Generate artifacts from .zapt templates') parser.add_argument('zap', help='Path to the application .zap file') - parser.add_argument('-t', '--templates', default=default_templates, help='Path to the .zapt templates records to use for generating artifacts (default: "' + default_templates + '")') - parser.add_argument('-z', '--zcl', default=default_zcl, help='Path to the zcl templates records to use for generating artifacts (default: "' + default_zcl + '")') + parser.add_argument('-t', '--templates', default=default_templates, + help='Path to the .zapt templates records to use for generating artifacts (default: "' + default_templates + '")') + parser.add_argument('-z', '--zcl', default=default_zcl, + help='Path to the zcl templates records to use for generating artifacts (default: "' + default_zcl + '")') args = parser.parse_args() # By default, this script assumes that the global CHIP template is used with @@ -77,18 +88,24 @@ def runArgumentsParser(): return (zap_file, zcl_file, templates_file, output_dir) + def runGeneration(zap_file, zcl_file, templates_file, output_dir): generator_dir = getDirPath('third_party/zap/repo') os.chdir(generator_dir) - subprocess.check_call(['node', './src-script/zap-generate.js', '-z', zcl_file, '-g', templates_file, '-i', zap_file, '-o', output_dir]) + subprocess.check_call(['node', './src-script/zap-generate.js', '-z', + zcl_file, '-g', templates_file, '-i', zap_file, '-o', output_dir]) + def runClangPrettifier(templates_file, output_dir): - listOfSupportedFileExtensions = ['.js', '.h', '.c', '.hpp', '.cpp', '.m', '.mm'] + listOfSupportedFileExtensions = [ + '.js', '.h', '.c', '.hpp', '.cpp', '.m', '.mm'] try: jsonData = json.loads(Path(templates_file).read_text()) - outputs = [(os.path.join(output_dir, template['output'])) for template in jsonData['templates']] - clangOutputs = list(filter(lambda filepath: os.path.splitext(filepath)[1] in listOfSupportedFileExtensions, outputs)) + outputs = [(os.path.join(output_dir, template['output'])) + for template in jsonData['templates']] + clangOutputs = list(filter(lambda filepath: os.path.splitext( + filepath)[1] in listOfSupportedFileExtensions, outputs)) if len(clangOutputs) > 0: args = ['clang-format', '-i'] @@ -97,27 +114,34 @@ def runClangPrettifier(templates_file, output_dir): except Exception as err: print('clang-format error:', err) + def runJavaPrettifier(templates_file, output_dir): try: jsonData = json.loads(Path(templates_file).read_text()) - outputs =
[(os.path.join(output_dir, template['output'])) + for template in jsonData['templates']] + javaOutputs = list( + filter(lambda filepath: os.path.splitext(filepath)[1] == ".java", outputs)) if len(javaOutputs) > 0: # Keep this version in sync with what restyler uses (https://github.com/project-chip/connectedhomeip/blob/master/.restyled.yaml). google_java_format_version = "1.6" - google_java_format_url = 'https://github.com/google/google-java-format/releases/download/google-java-format-' + google_java_format_version + '/' - google_java_format_jar = 'google-java-format-' + google_java_format_version + '-all-deps.jar' + google_java_format_url = 'https://github.com/google/google-java-format/releases/download/google-java-format-' + \ + google_java_format_version + '/' + google_java_format_jar = 'google-java-format-' + \ + google_java_format_version + '-all-deps.jar' jar_url = google_java_format_url + google_java_format_jar - + home = str(Path.home()) - path, http_message = urllib.request.urlretrieve(jar_url, home + '/' + google_java_format_jar) + path, http_message = urllib.request.urlretrieve( + jar_url, home + '/' + google_java_format_jar) args = ['java', '-jar', path, '--replace'] args.extend(javaOutputs) subprocess.check_call(args) except Exception as err: print('google-java-format error:', err) + def main(): checkPythonVersion() @@ -126,5 +150,6 @@ def main(): runClangPrettifier(templates_file, output_dir) runJavaPrettifier(templates_file, output_dir) + if __name__ == '__main__': main() diff --git a/scripts/tools/zap_convert_all.py b/scripts/tools/zap_convert_all.py index b0a129c867af98..86677d06aa4158 100755 --- a/scripts/tools/zap_convert_all.py +++ b/scripts/tools/zap_convert_all.py @@ -20,20 +20,28 @@ import sys import subprocess -CHIP_ROOT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..')) +CHIP_ROOT_DIR = os.path.realpath( + os.path.join(os.path.dirname(__file__), '../..')) + def checkPythonVersion(): if sys.version_info[0] < 3: - print('Must use Python 3. Current version is ' + str(sys.version_info[0])) + print('Must use Python 3. Current version is ' + + str(sys.version_info[0])) exit(1) + def getTargets(): targets = [] - targets.extend([[str(filepath)] for filepath in Path('./examples').rglob('*.zap')]) - targets.extend([[str(filepath)] for filepath in Path('./src/darwin').rglob('*.zap')]) - targets.extend([[str(filepath)] for filepath in Path('./src/controller/data_model').rglob('*.zap')]) + targets.extend([[str(filepath)] + for filepath in Path('./examples').rglob('*.zap')]) + targets.extend([[str(filepath)] + for filepath in Path('./src/darwin').rglob('*.zap')]) + targets.extend([[str(filepath)] for filepath in Path( + './src/controller/data_model').rglob('*.zap')]) return targets + def main(): checkPythonVersion() os.chdir(CHIP_ROOT_DIR) @@ -42,5 +50,6 @@ def main(): for target in targets: subprocess.check_call(['./scripts/tools/zap/convert.py'] + target) + if __name__ == '__main__': main() diff --git a/scripts/tools/zap_regen_all.py b/scripts/tools/zap_regen_all.py index 4bdaca6afe55d4..d5caf2f678309a 100755 --- a/scripts/tools/zap_regen_all.py +++ b/scripts/tools/zap_regen_all.py @@ -20,35 +20,50 @@ import sys import subprocess -CHIP_ROOT_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..')) +CHIP_ROOT_DIR = os.path.realpath( + os.path.join(os.path.dirname(__file__), '../..')) + def checkPythonVersion(): if sys.version_info[0] < 3: - print('Must use Python 3. 
Current version is ' + str(sys.version_info[0])) + print('Must use Python 3. Current version is ' + + str(sys.version_info[0])) exit(1) + def getGlobalTemplatesTargets(): targets = [] - targets.extend([[str(filepath)] for filepath in Path('./examples').rglob('*.zap')]) - targets.extend([[str(filepath)] for filepath in Path('./src/darwin').rglob('*.zap')]) - targets.extend([[str(filepath)] for filepath in Path('./src/controller/data_model').rglob('*.zap')]) + targets.extend([[str(filepath)] + for filepath in Path('./examples').rglob('*.zap')]) + targets.extend([[str(filepath)] + for filepath in Path('./src/darwin').rglob('*.zap')]) + targets.extend([[str(filepath)] for filepath in Path( + './src/controller/data_model').rglob('*.zap')]) return targets + def getSpecificTemplatesTargets(): targets = [] - targets.append(['src/controller/data_model/controller-clusters.zap', '-t', 'src/app/common/templates/templates.json']) - targets.append(['src/controller/data_model/controller-clusters.zap', '-t', 'examples/chip-tool/templates/templates.json']) - targets.append(['src/controller/data_model/controller-clusters.zap', '-t', 'src/controller/python/templates/templates.json']) - targets.append(['src/controller/data_model/controller-clusters.zap', '-t', 'src/darwin/Framework/CHIP/templates/templates.json']) - targets.append(['src/controller/data_model/controller-clusters.zap', '-t', 'src/controller/java/templates/templates.json']) + targets.append(['src/controller/data_model/controller-clusters.zap', + '-t', 'src/app/common/templates/templates.json']) + targets.append(['src/controller/data_model/controller-clusters.zap', + '-t', 'examples/chip-tool/templates/templates.json']) + targets.append(['src/controller/data_model/controller-clusters.zap', + '-t', 'src/controller/python/templates/templates.json']) + targets.append(['src/controller/data_model/controller-clusters.zap', + '-t', 'src/darwin/Framework/CHIP/templates/templates.json']) + targets.append(['src/controller/data_model/controller-clusters.zap', + '-t', 'src/controller/java/templates/templates.json']) return targets + def getTargets(): targets = [] targets.extend(getGlobalTemplatesTargets()) targets.extend(getSpecificTemplatesTargets()) return targets + def main(): checkPythonVersion() os.chdir(CHIP_ROOT_DIR) @@ -57,5 +72,6 @@ def main(): for target in targets: subprocess.check_call(['./scripts/tools/zap/generate.py'] + target) + if __name__ == '__main__': main() diff --git a/src/app/tests/suites/certification/information.py b/src/app/tests/suites/certification/information.py index 33c23950f04c69..5004fcf7a8f619 100755 --- a/src/app/tests/suites/certification/information.py +++ b/src/app/tests/suites/certification/information.py @@ -27,106 +27,126 @@ # * Pending: Tests are not running in CI, and all tests are disabled # * Partial: Tests are running in CI, but some tests are disabled # * Complete: Tests are running in CI + + class TestStatus(Enum): - missing = 1 - pending = 2 - partial = 3 - complete = 4 + missing = 1 + pending = 2 + partial = 3 + complete = 4 + class ArgOptions(Enum): - summary = 1 - unknown = 2 - missing = 3 - pending = 4 - partial = 5 - complete = 6 + summary = 1 + unknown = 2 + missing = 3 + pending = 4 + partial = 5 + complete = 6 + def checkPythonVersion(): if sys.version_info[0] < 3: - print('Must use Python 3. Current version is ' + str(sys.version_info[0])) + print('Must use Python 3. 
Current version is ' + + str(sys.version_info[0])) exit(1) + def parseTestPlans(filepath): - tests_names = [] - tests_statuses = [] - rv = [] + tests_names = [] + tests_statuses = [] + rv = [] + + for name, test_plan in parseYaml(filepath)['Test Plans'].items(): + for section, tests in test_plan['tests'].items(): + for index, test in enumerate(tests): + test_name = '_'.join([ + 'Test_TC', + test_plan['shortname'], + str(section), + str(index + 1) + ]) - for name, test_plan in parseYaml(filepath)['Test Plans'].items(): - for section, tests in test_plan['tests'].items(): - for index, test in enumerate(tests): - test_name = '_'.join([ - 'Test_TC', - test_plan['shortname'], - str(section), - str(index + 1) - ]) + tests_names.append(test_name) + tests_statuses.append(parseTestPlan(getPathFor(test_name))) - tests_names.append(test_name) - tests_statuses.append(parseTestPlan(getPathFor(test_name))) + return dict(zip(tests_names, tests_statuses)) - return dict(zip(tests_names, tests_statuses)) def parseTestPlan(filepath): - if not path.exists(filepath): return TestStatus.missing + if not path.exists(filepath): + return TestStatus.missing - is_pending_test = True + is_pending_test = True - for test_definition in parseYaml(filepath)['tests']: - if 'disabled' in test_definition: - if is_pending_test == False: - return TestStatus.partial - else: - is_pending_test = False + for test_definition in parseYaml(filepath)['tests']: + if 'disabled' in test_definition: + if is_pending_test == False: + return TestStatus.partial + else: + is_pending_test = False + + if is_pending_test == True: + return TestStatus.pending - if is_pending_test == True: return TestStatus.pending + return TestStatus.complete - return TestStatus.complete def parseYaml(filepath): - with open(filepath) as file: - return yaml.load(file, Loader=yaml.FullLoader) + with open(filepath) as file: + return yaml.load(file, Loader=yaml.FullLoader) + def getPathFor(filename): - return path.join(path.dirname(__file__), filename + '.yaml') + return path.join(path.dirname(__file__), filename + '.yaml') + def printSummaryFor(name, summary): count = summary[name] total = summary['total'] percent = round(count/total*100, 2) - print(' * ' + name.ljust(10) + ': ' + str(count).rjust(3) + ' (' + str(percent).rjust(5) + '%)') + print(' * ' + name.ljust(10) + ': ' + str(count).rjust(3) + + ' (' + str(percent).rjust(5) + '%)') + def printSummary(statuses): summary = { - 'total': len(statuses), - 'missings': sum(TestStatus.missing == status for status in statuses.values()), - 'pendings': sum(TestStatus.pending == status for status in statuses.values()), - 'partials': sum(TestStatus.partial == status for status in statuses.values()), - 'completes': sum(TestStatus.complete == status for status in statuses.values()), + 'total': len(statuses), + 'missings': sum(TestStatus.missing == status for status in statuses.values()), + 'pendings': sum(TestStatus.pending == status for status in statuses.values()), + 'partials': sum(TestStatus.partial == status for status in statuses.values()), + 'completes': sum(TestStatus.complete == status for status in statuses.values()), } print('Tests count: ', summary['total']) - printSummaryFor('missings', summary); - printSummaryFor('pendings', summary); - printSummaryFor('partials', summary); - printSummaryFor('completes', summary); + printSummaryFor('missings', summary) + printSummaryFor('pendings', summary) + printSummaryFor('partials', summary) + printSummaryFor('completes', summary) + def printUnknown(statuses): - filtered = 
list(filter(lambda name: name.startswith('Test_TC_'), listdir(path.dirname(__file__)))) - dir_test_names = [path.splitext(name)[0] for name in filtered] + filtered = list(filter(lambda name: name.startswith( + 'Test_TC_'), listdir(path.dirname(__file__)))) + dir_test_names = [path.splitext(name)[0] for name in filtered] - known_test_names = [name for name in statuses] - unknown_test_names = list(filter(lambda name: name not in known_test_names, dir_test_names)) + known_test_names = [name for name in statuses] + unknown_test_names = list( + filter(lambda name: name not in known_test_names, dir_test_names)) + + print('List of tests that are not part of the test plan:') + for name in unknown_test_names: + print(' *', name) - print('List of tests that are not part of the test plan:') - for name in unknown_test_names: - print(' *', name) def printList(statuses, name): - filtered = dict(filter(lambda item: TestStatus[name] == item[1], statuses.items())) + filtered = dict( + filter(lambda item: TestStatus[name] == item[1], statuses.items())) + + print('List of tests with status:', name) + for name in filtered: + print(' *', name) - print('List of tests with status:', name) - for name in filtered: - print(' *', name) def main(): checkPythonVersion() @@ -134,20 +154,23 @@ def main(): default_options = ArgOptions.summary.name default_choices = [name for name in ArgOptions.__members__] - parser = argparse.ArgumentParser(description='Extract information from the set of certifications tests') - parser.add_argument('-s', '--show', default=default_options, choices=default_choices, help='The information that needs to be returned from the test set') + parser = argparse.ArgumentParser( + description='Extract information from the set of certifications tests') + parser.add_argument('-s', '--show', default=default_options, choices=default_choices, + help='The information that needs to be returned from the test set') args = parser.parse_args() statuses = parseTestPlans(getPathFor('tests')) if (ArgOptions.summary.name == args.show): - printSummary(statuses) + printSummary(statuses) elif (ArgOptions.unknown.name == args.show): - printUnknown(statuses) + printUnknown(statuses) elif (args.show in ArgOptions.__members__): - printList(statuses, args.show) + printList(statuses, args.show) else: - parser.print_help() + parser.print_help() + if __name__ == '__main__': main() diff --git a/src/app/zap_cluster_list.py b/src/app/zap_cluster_list.py index bd465690fd10c3..81e9747b0b68fd 100755 --- a/src/app/zap_cluster_list.py +++ b/src/app/zap_cluster_list.py @@ -27,16 +27,16 @@ 'DEVICE_TEMP_CLUSTER': [], 'DIAGNOSTIC_LOGS_CLUSTER': ['diagnostic-logs-server'], 'DOOR_LOCK_CLUSTER': ['door-lock-server'], - 'ETHERNET_NETWORK_DIAGNOSTICS_CLUSTER': ['ethernet_network_diagnostics_server'], + 'ETHERNET_NETWORK_DIAGNOSTICS_CLUSTER': ['ethernet_network_diagnostics_server'], 'FIXED_LABEL_CLUSTER': [], 'FLOW_MEASUREMENT_CLUSTER': [], 'GENERAL_COMMISSIONING_CLUSTER': ['general-commissioning-server'], - 'GENERAL_DIAGNOSTICS_CLUSTER': [], + 'GENERAL_DIAGNOSTICS_CLUSTER': [], 'GROUPS_CLUSTER': ['groups-server'], 'GROUP_KEY_MANAGEMENT_CLUSTER': [], 'IAS_ZONE_CLUSTER': ['ias-zone-server'], 'IDENTIFY_CLUSTER': ['identify'], - 'KEYPAD_INPUT_CLUSTER': [ 'keypad-input-server'], + 'KEYPAD_INPUT_CLUSTER': ['keypad-input-server'], 'LEVEL_CONTROL_CLUSTER': ['level-control'], 'LOW_POWER_CLUSTER': ['low-power-server'], 'MEDIA_INPUT_CLUSTER': ['media-input-server'], @@ -54,7 +54,7 @@ 'RELATIVE_HUMIDITY_MEASUREMENT_CLUSTER': 
['relative-humidity-measurement-server'], 'ELECTRICAL_MEASUREMENT_CLUSTER': ['electrical-measurement-server'], 'SCENES_CLUSTER': ['scenes'], - 'SOFTWARE_DIAGNOSTICS_CLUSTER': ['software_diagnostics_server'], + 'SOFTWARE_DIAGNOSTICS_CLUSTER': ['software_diagnostics_server'], 'SWITCH_CLUSTER': [], 'TARGET_NAVIGATOR_CLUSTER': ['target-navigator-server'], 'TEMP_MEASUREMENT_CLUSTER': ['temperature-measurement-server'], @@ -89,11 +89,11 @@ 'DESCRIPTOR_CLUSTER': [], 'DIAGNOSTIC_LOGS_CLUSTER': [], 'DOOR_LOCK_CLUSTER': [], - 'ETHERNET_NETWORK_DIAGNOSTICS_CLUSTER': [], + 'ETHERNET_NETWORK_DIAGNOSTICS_CLUSTER': [], 'FIXED_LABEL_CLUSTER': [], 'FLOW_MEASUREMENT_CLUSTER': [], 'GENERAL_COMMISSIONING_CLUSTER': [], - 'GENERAL_DIAGNOSTICS_CLUSTER': [], + 'GENERAL_DIAGNOSTICS_CLUSTER': [], 'GROUPS_CLUSTER': [], 'GROUP_KEY_MANAGEMENT_CLUSTER': [], 'IAS_ZONE_CLUSTER': ['ias-zone-client'], diff --git a/src/controller/python/build-chip-wheel.py b/src/controller/python/build-chip-wheel.py index 59f1a5a95cbc49..32e39dad39afd1 100644 --- a/src/controller/python/build-chip-wheel.py +++ b/src/controller/python/build-chip-wheel.py @@ -33,22 +33,27 @@ import shutil -parser = argparse.ArgumentParser(description='build the pip package for chip using chip components generated during the build and python source code') -parser.add_argument('--package_name', default='chip', help='configure the python package name') -parser.add_argument('--build_number', default='0.0', help='configure the chip build number') +parser = argparse.ArgumentParser( + description='build the pip package for chip using chip components generated during the build and python source code') +parser.add_argument('--package_name', default='chip', + help='configure the python package name') +parser.add_argument('--build_number', default='0.0', + help='configure the chip build number') parser.add_argument('--build_dir', help='directory to build in') parser.add_argument('--dist_dir', help='directory to place distribution in') parser.add_argument('--manifest', help='list of files to package') -parser.add_argument('--plat-name', help='platform name to embed in generated filenames') +parser.add_argument( + '--plat-name', help='platform name to embed in generated filenames') args = parser.parse_args() + class InstalledScriptInfo: """Information holder about a script that is to be installed.""" def __init__(self, name): - self.name = name - self.installName = os.path.splitext(name)[0] + self.name = name + self.installName = os.path.splitext(name)[0] chipDLLName = '_ChipDeviceCtrl.so' @@ -92,14 +97,14 @@ def __init__(self, name): for entry in manifest['files']: srcDir = os.path.join(manifestBase, entry['src_dir']) for path in entry['sources']: - srcFile = os.path.join(srcDir, path) - dstFile = os.path.join(tmpDir, path) - os.makedirs(os.path.dirname(dstFile), exist_ok=True) - shutil.copyfile(srcFile, dstFile) + srcFile = os.path.join(srcDir, path) + dstFile = os.path.join(tmpDir, path) + os.makedirs(os.path.dirname(dstFile), exist_ok=True) + shutil.copyfile(srcFile, dstFile) for script in installScripts: - os.rename(os.path.join(tmpDir, script.name), - os.path.join(tmpDir, script.installName)) + os.rename(os.path.join(tmpDir, script.name), + os.path.join(tmpDir, script.installName)) # Define a custom version of the bdist_wheel command that configures the # resultant wheel as platform-specific (i.e. not "pure"). @@ -124,21 +129,21 @@ def finalize_options(self): # # Build the chip package... 
# - packages=[ - 'chip', - 'chip.ble', - 'chip.ble.commissioning', - 'chip.configuration', - 'chip.clusters', - 'chip.discovery', - 'chip.exceptions', - 'chip.internal', - 'chip.interaction_model', - 'chip.logging', - 'chip.native', - 'chip.clusters', - 'chip.tlv', - 'chip.setup_payload', + packages = [ + 'chip', + 'chip.ble', + 'chip.ble.commissioning', + 'chip.configuration', + 'chip.clusters', + 'chip.discovery', + 'chip.exceptions', + 'chip.internal', + 'chip.interaction_model', + 'chip.logging', + 'chip.native', + 'chip.clusters', + 'chip.tlv', + 'chip.setup_payload', ] # Invoke the setuptools 'bdist_wheel' command to generate a wheel containing @@ -159,37 +164,41 @@ def finalize_options(self): python_requires='>=2.7', packages=packages, package_dir={ - '':tmpDir, # By default, look in the tmp directory for packages/modules to be included. + # By default, look in the tmp directory for packages/modules to be included. + '': tmpDir, }, package_data={ - packageName:[ - chipDLLName # Include the wrapper DLL as package data in the "chip" package. + packageName: [ + # Include the wrapper DLL as package data in the "chip" package. + chipDLLName ] }, - scripts = [name for name in map( + scripts=[name for name in map( lambda script: os.path.join(tmpDir, script.installName), installScripts )], install_requires=requiredPackages, options={ - 'bdist_wheel':{ - 'universal':False, - 'dist_dir':distDir, # Place the generated .whl in the dist directory. - 'py_limited_api':'cp37', - 'plat_name':args.plat_name, + 'bdist_wheel': { + 'universal': False, + # Place the generated .whl in the dist directory. + 'dist_dir': distDir, + 'py_limited_api': 'cp37', + 'plat_name': args.plat_name, }, - 'egg_info':{ - 'egg_base':tmpDir # Place the .egg-info subdirectory in the tmp directory. + 'egg_info': { + # Place the .egg-info subdirectory in the tmp directory. + 'egg_base': tmpDir } }, cmdclass={ - 'bdist_wheel':bdist_wheel_override + 'bdist_wheel': bdist_wheel_override }, - script_args=[ 'clean', '--all', 'bdist_wheel' ] + script_args=['clean', '--all', 'bdist_wheel'] ) finally: - + # Switch back to the initial current directory. os.chdir(curDir) diff --git a/src/controller/python/chip-device-ctrl.py b/src/controller/python/chip-device-ctrl.py index 20fdbd218c2869..e594e2aeea6b9d 100755 --- a/src/controller/python/chip-device-ctrl.py +++ b/src/controller/python/chip-device-ctrl.py @@ -102,6 +102,7 @@ def DecodeHexIntOption(option, opt, value): except ValueError: raise OptionValueError("option %s: invalid value: %r" % (opt, value)) + def ParseEncodedString(value): if value.find(":") < 0: raise ParsingError( @@ -158,15 +159,17 @@ def __init__(self, rendezvousAddr=None, controllerNodeId=0, bluetoothAdapter=Non controllerNodeId=controllerNodeId, bluetoothAdapter=bluetoothAdapter) self.commissionableNodeCtrl = ChipCommissionableNodeCtrl.ChipCommissionableNodeController() - + # If we are on Linux and user selects non-default bluetooth adapter. 
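
The guarded, platform-gated setup that follows is a reusable pattern; a sketch of it under the assumption that a BleManager-like factory is supplied by the caller (maybe_select_ble_adapter and make_ble_manager are hypothetical names, not part of the script):

import sys
import traceback

def maybe_select_ble_adapter(make_ble_manager, adapter_index):
    # make_ble_manager: zero-argument factory returning an object that exposes
    # ble_adapter_select(); it stands in for the BleManager used by the script.
    if not (sys.platform.startswith('linux') and adapter_index is not None):
        return None
    try:
        ble_mgr = make_ble_manager()
        # Linux exposes Bluetooth adapters as hci0, hci1, ...
        ble_mgr.ble_adapter_select('hci{}'.format(adapter_index))
        return ble_mgr
    except Exception:
        traceback.print_exc()
        print("Failed to initialize BLE; if you don't have BLE, "
              'run chip-device-ctrl with --no-ble')
        raise
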
if sys.platform.startswith("linux") and (bluetoothAdapter is not None): try: self.bleMgr = BleManager(self.devCtrl) - self.bleMgr.ble_adapter_select("hci{}".format(bluetoothAdapter)) + self.bleMgr.ble_adapter_select( + "hci{}".format(bluetoothAdapter)) except Exception as ex: traceback.print_exc() - print("Failed to initialize BLE, if you don't have BLE, run chip-device-ctrl with --no-ble") + print( + "Failed to initialize BLE, if you don't have BLE, run chip-device-ctrl with --no-ble") raise ex self.historyFileName = os.path.expanduser( @@ -321,7 +324,6 @@ def do_setlogoutput(self, line): print(str(ex)) return - def do_setuppayload(self, line): """ setup-payload generate [options] @@ -347,15 +349,21 @@ def do_setuppayload(self, line): if arglist[0] == "generate": parser = argparse.ArgumentParser() parser.add_argument("-vr", type=int, default=0, dest='version') - parser.add_argument("-pi", type=int, default=0, dest='productId') - parser.add_argument("-vi", type=int, default=0, dest='vendorId') - parser.add_argument('-cf', type=int, default=0, dest='customFlow') - parser.add_argument("-dc", type=int, default=0, dest='capabilities') - parser.add_argument("-dv", type=int, default=0, dest='discriminator') - parser.add_argument("-ps", type=int, dest='passcode') + parser.add_argument( + "-pi", type=int, default=0, dest='productId') + parser.add_argument( + "-vi", type=int, default=0, dest='vendorId') + parser.add_argument( + '-cf', type=int, default=0, dest='customFlow') + parser.add_argument( + "-dc", type=int, default=0, dest='capabilities') + parser.add_argument( + "-dv", type=int, default=0, dest='discriminator') + parser.add_argument("-ps", type=int, dest='passcode') args = parser.parse_args(arglist[1:]) - SetupPayload().PrintOnboardingCodes(args.passcode, args.vendorId, args.productId, args.discriminator, args.customFlow, args.capabilities, args.version) + SetupPayload().PrintOnboardingCodes(args.passcode, args.vendorId, args.productId, + args.discriminator, args.customFlow, args.capabilities, args.version) if arglist[0] == "parse-manual": SetupPayload().ParseManualPairingCode(arglist[1]).Print() @@ -443,18 +451,22 @@ def ConnectFromSetupPayload(self, setupPayload, nodeid): if int(setupPayload.attributes["RendezvousInformation"]) & onnetwork: print("Attempting to find device on Network") - longDiscriminator = ctypes.c_uint16(int(setupPayload.attributes['Discriminator'])) - self.devCtrl.DiscoverCommissionableNodesLongDiscriminator(longDiscriminator) + longDiscriminator = ctypes.c_uint16( + int(setupPayload.attributes['Discriminator'])) + self.devCtrl.DiscoverCommissionableNodesLongDiscriminator( + longDiscriminator) print("Waiting for device responses...") - strlen = 100; + strlen = 100 addrStrStorage = ctypes.create_string_buffer(strlen) # If this device is on the network and we're looking specifically for 1 device, # expect a quick response. 
            if self.wait_for_one_discovered_device():
-                self.devCtrl.GetIPForDiscoveredDevice(0, addrStrStorage, strlen)
+                self.devCtrl.GetIPForDiscoveredDevice(
+                    0, addrStrStorage, strlen)
                 addrStr = addrStrStorage.value.decode('utf-8')
                 print("Connecting to device at " + addrStr)
-                pincode = ctypes.c_uint32(int(setupPayload.attributes['SetUpPINCode']))
+                pincode = ctypes.c_uint32(
+                    int(setupPayload.attributes['SetUpPINCode']))
                 if self.devCtrl.ConnectIP(addrStrStorage, pincode, nodeid):
                     print("Connected")
                     return 0
@@ -466,8 +478,10 @@ def ConnectFromSetupPayload(self, setupPayload, nodeid):

         if int(setupPayload.attributes["RendezvousInformation"]) & ble:
             print("Attempting to connect via BLE")
-            longDiscriminator = ctypes.c_uint16(int(setupPayload.attributes['Discriminator']))
-            pincode = ctypes.c_uint32(int(setupPayload.attributes['SetUpPINCode']))
+            longDiscriminator = ctypes.c_uint16(
+                int(setupPayload.attributes['Discriminator']))
+            pincode = ctypes.c_uint32(
+                int(setupPayload.attributes['SetUpPINCode']))
             if self.devCtrl.ConnectBLE(longDiscriminator, pincode, nodeid):
                 print("Connected")
                 return 0
@@ -506,7 +520,7 @@ def do_connect(self, line):
                     "utf-8"), int(args[2]), nodeid)
         elif args[0] == "-ble" and len(args) >= 3:
             self.devCtrl.ConnectBLE(int(args[1]), int(args[2]), nodeid)
-        elif args[0] == '-qr' and len(args) >=2:
+        elif args[0] == '-qr' and len(args) >= 2:
             if len(args) == 3:
                 nodeid = int(args[2])
             print("Parsing QR code {}".format(args[1]))
@@ -563,7 +577,7 @@ def do_resolve(self, line):

     def wait_for_one_discovered_device(self):
         print("Waiting for device responses...")
-        strlen = 100;
+        strlen = 100
         addrStrStorage = ctypes.create_string_buffer(strlen)
         count = 0
         maxWaitTime = 2
@@ -600,15 +614,23 @@ def do_discover(self, line):
             return
         parser = argparse.ArgumentParser()
         group = parser.add_mutually_exclusive_group()
-        group.add_argument('-all', help='discover all commissionable nodes and commissioners', action='store_true')
-        group.add_argument('-qr', help='discover commissionable nodes matching provided QR code', type=str)
-        group.add_argument('-l', help='discover commissionable nodes with given long discriminator', type=int)
-        group.add_argument('-s', help='discover commissionable nodes with given short discriminator', type=int)
-        group.add_argument('-v', help='discover commissionable nodes wtih given vendor ID', type=int)
-        group.add_argument('-t', help='discover commissionable nodes with given device type', type=int)
-        group.add_argument('-c', help='discover commissionable nodes with given commissioning mode', type=int)
-        group.add_argument('-a', help='discover commissionable nodes put in commissioning mode from command', action='store_true')
-        args=parser.parse_args(arglist)
+        group.add_argument(
+            '-all', help='discover all commissionable nodes and commissioners', action='store_true')
+        group.add_argument(
+            '-qr', help='discover commissionable nodes matching provided QR code', type=str)
+        group.add_argument(
+            '-l', help='discover commissionable nodes with given long discriminator', type=int)
+        group.add_argument(
+            '-s', help='discover commissionable nodes with given short discriminator', type=int)
+        group.add_argument(
+            '-v', help='discover commissionable nodes with given vendor ID', type=int)
+        group.add_argument(
+            '-t', help='discover commissionable nodes with given device type', type=int)
+        group.add_argument(
+            '-c', help='discover commissionable nodes with given commissioning mode', type=int)
+        group.add_argument(
+            '-a', help='discover commissionable nodes put in commissioning
mode from command', action='store_true') + args = parser.parse_args(arglist) if args.all: self.commissionableNodeCtrl.DiscoverCommissioners() self.wait_for_many_discovered_devices() @@ -617,23 +639,30 @@ def do_discover(self, line): self.wait_for_many_discovered_devices() elif args.qr is not None: setupPayload = SetupPayload().ParseQrCode(args.qr) - longDiscriminator = ctypes.c_uint16(int(setupPayload.attributes['Discriminator'])) - self.devCtrl.DiscoverCommissionableNodesLongDiscriminator(longDiscriminator) + longDiscriminator = ctypes.c_uint16( + int(setupPayload.attributes['Discriminator'])) + self.devCtrl.DiscoverCommissionableNodesLongDiscriminator( + longDiscriminator) self.wait_for_one_discovered_device() elif args.l is not None: - self.devCtrl.DiscoverCommissionableNodesLongDiscriminator(ctypes.c_uint16(args.l)) + self.devCtrl.DiscoverCommissionableNodesLongDiscriminator( + ctypes.c_uint16(args.l)) self.wait_for_one_discovered_device() elif args.s is not None: - self.devCtrl.DiscoverCommissionableNodesShortDiscriminator(ctypes.c_uint16(args.s)) + self.devCtrl.DiscoverCommissionableNodesShortDiscriminator( + ctypes.c_uint16(args.s)) self.wait_for_one_discovered_device() elif args.v is not None: - self.devCtrl.DiscoverCommissionableNodesVendor(ctypes.c_uint16(args.v)) + self.devCtrl.DiscoverCommissionableNodesVendor( + ctypes.c_uint16(args.v)) self.wait_for_many_discovered_devices() elif args.t is not None: - self.devCtrl.DiscoverCommissionableNodesDeviceType(ctypes.c_uint16(args.t)) + self.devCtrl.DiscoverCommissionableNodesDeviceType( + ctypes.c_uint16(args.t)) self.wait_for_many_discovered_devices() elif args.c is not None: - self.devCtrl.DiscoverCommissionableNodesCommissioningEnabled(ctypes.c_uint16(args.c)) + self.devCtrl.DiscoverCommissionableNodesCommissioningEnabled( + ctypes.c_uint16(args.c)) self.wait_for_many_discovered_devices() elif args.a is not None: self.devCtrl.DiscoverCommissionableNodesCommissioningEnabledFromCommand() @@ -748,7 +777,8 @@ def do_zclwrite(self, line): if args[1] not in all_attrs: raise exceptions.UnknownCluster(args[1]) cluster_attrs = all_attrs.get(args[1], {}) - print('\n'.join(["{}: {}".format(key, cluster_attrs[key]["type"]) for key in cluster_attrs.keys() if cluster_attrs[key].get("writable", False)])) + print('\n'.join(["{}: {}".format(key, cluster_attrs[key]["type"]) + for key in cluster_attrs.keys() if cluster_attrs[key].get("writable", False)])) elif len(args) == 6: if args[0] not in all_attrs: raise exceptions.UnknownCluster(args[0]) @@ -779,7 +809,8 @@ def do_zclconfigure(self, line): if args[1] not in all_attrs: raise exceptions.UnknownCluster(args[1]) cluster_attrs = all_attrs.get(args[1], {}) - print('\n'.join([key for key in cluster_attrs.keys() if cluster_attrs[key].get("reportable", False)])) + print('\n'.join([key for key in cluster_attrs.keys( + ) if cluster_attrs[key].get("reportable", False)])) elif len(args) == 7: if args[0] not in all_attrs: raise exceptions.UnknownCluster(args[0]) diff --git a/src/controller/python/chip-repl.py b/src/controller/python/chip-repl.py index 9e44f1df276cc9..99ad8714a119e7 100644 --- a/src/controller/python/chip-repl.py +++ b/src/controller/python/chip-repl.py @@ -23,6 +23,7 @@ import coloredlogs import logging + def main(): # The chip imports at the top level will be visible in the ipython REPL. @@ -32,7 +33,7 @@ def main(): # trace/debug logging is not friendly to an interactive console. Only keep errors. 
logging.getLogger().setLevel(logging.ERROR) - embed(header = ''' + embed(header=''' Welcome to the CHIP python REPL utilty. Usage examples: @@ -92,5 +93,6 @@ def main(): '''.strip()) + if __name__ == "__main__": main() diff --git a/src/controller/python/chip/ChipBleBase.py b/src/controller/python/chip/ChipBleBase.py index 37e9316ea78bc8..b7c750a06e9f3e 100644 --- a/src/controller/python/chip/ChipBleBase.py +++ b/src/controller/python/chip/ChipBleBase.py @@ -63,7 +63,8 @@ def ParseInputLine(self, line, cmd=None): type="float", default=10.0, ) - optParser.add_option("-q", "--quiet", action="store_true", dest="quiet") + optParser.add_option( + "-q", "--quiet", action="store_true", dest="quiet") try: (options, remainingArgs) = optParser.parse_args(args) diff --git a/src/controller/python/chip/ChipBleUtility.py b/src/controller/python/chip/ChipBleUtility.py index 24555355165442..95abc3b7c8be76 100644 --- a/src/controller/python/chip/ChipBleUtility.py +++ b/src/controller/python/chip/ChipBleUtility.py @@ -195,7 +195,8 @@ def Print(self, prefix=""): "%sBleEvent Type: %s" % ( prefix, - ("SUBSCRIBE" if self.EventType == BLE_EVENT_TYPE_SUBSCRIBE else "ERROR"), + ("SUBSCRIBE" if self.EventType == + BLE_EVENT_TYPE_SUBSCRIBE else "ERROR"), ) ) print("%sStatus: %s" % (prefix, str(self.Status))) @@ -238,7 +239,8 @@ def SetField(self, name, val): class BleTxEventStruct(Structure): _fields_ = [ ("EventType", c_int32), # The type of event. - ("ConnObj", c_void_p), # a Handle back to the connection object or None. + # a Handle back to the connection object or None. + ("ConnObj", c_void_p), ("SvcId", c_void_p), # the byte array of the service UUID. ("CharId", c_void_p), # the byte array of the characteristic UUID. ("Status", c_bool), # The status of the previous Tx request @@ -256,8 +258,10 @@ def fromBleTxEvent(cls, bleTxEvent): bleTxEventStruct = cls() bleTxEventStruct.EventType = bleTxEvent.EventType bleTxEventStruct.ConnObj = c_void_p(FAKE_CONN_OBJ_VALUE) - bleTxEventStruct.SvcId = ChipUtility.ByteArrayToVoidPtr(bleTxEvent.SvcId) - bleTxEventStruct.CharId = ChipUtility.ByteArrayToVoidPtr(bleTxEvent.CharId) + bleTxEventStruct.SvcId = ChipUtility.ByteArrayToVoidPtr( + bleTxEvent.SvcId) + bleTxEventStruct.CharId = ChipUtility.ByteArrayToVoidPtr( + bleTxEvent.CharId) bleTxEventStruct.Status = bleTxEvent.Status return bleTxEventStruct @@ -265,7 +269,8 @@ def fromBleTxEvent(cls, bleTxEvent): class BleDisconnectEventStruct(Structure): _fields_ = [ ("EventType", c_int32), # The type of event. - ("ConnObj", c_void_p), # a Handle back to the connection object or None. + # a Handle back to the connection object or None. + ("ConnObj", c_void_p), ("Error", c_int32), # The disconnect error code. ] @@ -284,7 +289,8 @@ def fromBleDisconnectEvent(cls, bleDisconnectEvent): class BleRxEventStruct(Structure): _fields_ = [ ("EventType", c_int32), # The type of event. - ("ConnObj", c_void_p), # a Handle back to the connection object or None. + # a Handle back to the connection object or None. + ("ConnObj", c_void_p), ("SvcId", c_void_p), # the byte array of the service UUID. ("CharId", c_void_p), # the byte array of the characteristic UUID. ("Buffer", c_void_p), # the byte array of the Rx packet. 
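For context, the Ble*EventStruct classes touched above all follow the same ctypes pattern: a Python Structure mirrors the C event layout field-for-field, and byte buffers cross the boundary as c_void_p while the caller keeps the backing memory alive. A minimal, self-contained sketch of that pattern (ExampleEventStruct and bytes_to_void_p are illustrative names, not part of the CHIP API):

    from ctypes import Structure, c_int32, c_uint16, c_void_p, cast, create_string_buffer

    class ExampleEventStruct(Structure):
        # Field order and ctypes types must match the C struct exactly.
        _fields_ = [
            ("EventType", c_int32),  # the type of event
            ("Buffer", c_void_p),    # pointer to the payload bytes
            ("Length", c_uint16),    # payload length in bytes
        ]

    def bytes_to_void_p(data):
        # ctypes does not take ownership: the caller must keep 'buf'
        # referenced for as long as native code may read the pointer.
        buf = create_string_buffer(data, len(data))
        return cast(buf, c_void_p), buf

    ptr, keepalive = bytes_to_void_p(b"\x01\x02\x03")
    event = ExampleEventStruct(EventType=1, Buffer=ptr, Length=3)
    print(event.EventType, event.Length)  # -> 1 3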
@@ -303,9 +309,12 @@ def fromBleRxEvent(cls, bleRxEvent): bleRxEventStruct = cls() bleRxEventStruct.EventType = bleRxEvent.EventType bleRxEventStruct.ConnObj = c_void_p(FAKE_CONN_OBJ_VALUE) - bleRxEventStruct.SvcId = ChipUtility.ByteArrayToVoidPtr(bleRxEvent.SvcId) - bleRxEventStruct.CharId = ChipUtility.ByteArrayToVoidPtr(bleRxEvent.CharId) - bleRxEventStruct.Buffer = ChipUtility.ByteArrayToVoidPtr(bleRxEvent.Buffer) + bleRxEventStruct.SvcId = ChipUtility.ByteArrayToVoidPtr( + bleRxEvent.SvcId) + bleRxEventStruct.CharId = ChipUtility.ByteArrayToVoidPtr( + bleRxEvent.CharId) + bleRxEventStruct.Buffer = ChipUtility.ByteArrayToVoidPtr( + bleRxEvent.Buffer) bleRxEventStruct.Length = ( len(bleRxEvent.Buffer) if (bleRxEvent.Buffer != None) else 0 ) @@ -315,7 +324,8 @@ def fromBleRxEvent(cls, bleRxEvent): class BleSubscribeEventStruct(Structure): _fields_ = [ ("EventType", c_int32), # The type of event. - ("ConnObj", c_void_p), # a Handle back to the connection object or None. + # a Handle back to the connection object or None. + ("ConnObj", c_void_p), ("SvcId", c_void_p), # the byte array of the service UUID. ("CharId", c_void_p), # the byte array of the characteristic UUID. ("Operation", c_int32), # The subscribe operation. @@ -345,6 +355,7 @@ def fromBleSubscribeEvent(cls, bleSubscribeEvent): bleSubscribeEventStruct.Status = bleSubscribeEvent.Status return bleSubscribeEventStruct + class BleDeviceIdentificationInfo: def __init__(self, pairingState, discriminator, vendorId, productId): self.pairingState = pairingState @@ -352,6 +363,7 @@ def __init__(self, pairingState, discriminator, vendorId, productId): self.vendorId = vendorId self.productId = productId + def ParseServiceData(data): if len(data) != 7: return None diff --git a/src/controller/python/chip/ChipBluezMgr.py b/src/controller/python/chip/ChipBluezMgr.py index f40e81f59bbe02..587a160ef6e5cb 100644 --- a/src/controller/python/chip/ChipBluezMgr.py +++ b/src/controller/python/chip/ChipBluezMgr.py @@ -71,7 +71,8 @@ chip_rx = uuid.UUID("18EE2EF5-263D-4559-959F-4F9C429F9D12") chip_service_short = uuid.UUID("0000FFF6-0000-0000-0000-000000000000") chromecast_setup_service = uuid.UUID("0000FEA0-0000-1000-8000-00805F9B34FB") -chromecast_setup_service_short = uuid.UUID("0000FEA0-0000-0000-0000-000000000000") +chromecast_setup_service_short = uuid.UUID( + "0000FEA0-0000-0000-0000-000000000000") BLUEZ_NAME = "org.bluez" ADAPTER_INTERFACE = BLUEZ_NAME + ".Adapter1" @@ -162,7 +163,8 @@ def adapter_on_prop_changed_cb( if len(invalidated_properties) > 0: self.logger.debug( - "invalidated_properties is not empty %s" % str(invalidated_properties) + "invalidated_properties is not empty %s" % str( + invalidated_properties) ) return @@ -235,7 +237,8 @@ def SetDiscoveryFilter(self, dict): @property def Discovering(self): try: - result = self.adapter_properties.Get(ADAPTER_INTERFACE, "Discovering") + result = self.adapter_properties.Get( + ADAPTER_INTERFACE, "Discovering") return bool(result) except dbus.exceptions.DBusException as ex: self.logger.debug(str(ex)) @@ -259,7 +262,8 @@ def DiscoverableTimeout(self, timeoutSec): def Powered(self, enable): try: - result = self.adapter_properties.Set(ADAPTER_INTERFACE, "Powered", enable) + result = self.adapter_properties.Set( + ADAPTER_INTERFACE, "Powered", enable) return bool(result) except dbus.exceptions.DBusException as ex: self.logger.debug(str(ex)) @@ -371,7 +375,8 @@ def device_on_prop_changed_cb( if len(invalidated_properties) > 0: self.logger.debug( - "invalidated_properties is not empty %s" % 
str(invalidated_properties) + "invalidated_properties is not empty %s" % str( + invalidated_properties) ) return @@ -541,7 +546,8 @@ def ServiceData(self): @property def ServicesResolved(self): try: - result = self.device_properties.Get(DEVICE_INTERFACE, "ServicesResolved") + result = self.device_properties.Get( + DEVICE_INTERFACE, "ServicesResolved") return bool(result) except dbus.exceptions.DBusException as ex: self.logger.debug(str(ex)) @@ -590,7 +596,8 @@ def uuid(self): @property def Primary(self): try: - result = bool(self.service_properties.Get(SERVICE_INTERFACE, "Primary")) + result = bool(self.service_properties.Get( + SERVICE_INTERFACE, "Primary")) return result except dbus.exceptions.DBusException as ex: self.logger.debug(str(ex)) @@ -641,8 +648,10 @@ class BluezDbusGattCharacteristic: def __init__(self, bluez_obj, bluez, bus, logger=None): self.logger = logger if logger else logging.getLogger("ChipBLEMgr") self.object = bluez_obj - self.characteristic = dbus.Interface(bluez_obj, CHARACTERISTIC_INTERFACE) - self.characteristic_properties = dbus.Interface(bluez_obj, DBUS_PROPERTIES) + self.characteristic = dbus.Interface( + bluez_obj, CHARACTERISTIC_INTERFACE) + self.characteristic_properties = dbus.Interface( + bluez_obj, DBUS_PROPERTIES) self.received = None self.path = self.characteristic.object_path self.bluez = bluez @@ -692,7 +701,8 @@ def gatt_on_characteristic_changed_cb( self, interface, changed_properties, invalidated_properties ): self.logger.debug( - "property change in" + str(self.characteristic) + str(changed_properties) + "property change in" + + str(self.characteristic) + str(changed_properties) ) if len(changed_properties) == 0: @@ -725,7 +735,8 @@ def uuid(self): try: result = uuid.UUID( str( - self.characteristic_properties.Get(CHARACTERISTIC_INTERFACE, "UUID") + self.characteristic_properties.Get( + CHARACTERISTIC_INTERFACE, "UUID") ) ) return result @@ -804,7 +815,8 @@ def __init__(self, devMgr, logger=None): dbus.mainloop.glib.threads_init() self.bus = dbus.SystemBus() self.bluez = dbus.Interface( - self.bus.get_object(BLUEZ_NAME, "/"), "org.freedesktop.DBus.ObjectManager" + self.bus.get_object( + BLUEZ_NAME, "/"), "org.freedesktop.DBus.ObjectManager" ) self.target = None self.service = None @@ -838,24 +850,26 @@ def get_adapters(self): ) ] - def ble_adapter_print(self): try: adapters = [ - BluezDbusAdapter(p["object"], self.bluez, self.bus, self.logger) + BluezDbusAdapter(p["object"], self.bluez, + self.bus, self.logger) for p in get_bluez_objects( self.bluez, self.bus, ADAPTER_INTERFACE, "/org/bluez" ) ] for i in range(len(adapters)): - self.logger.info("AdapterName: %s AdapterAddress: %s" % (adapters[i].path.replace("/org/bluez/", ""), adapters[i].Address)) + self.logger.info("AdapterName: %s AdapterAddress: %s" % ( + adapters[i].path.replace("/org/bluez/", ""), adapters[i].Address)) except dbus.exceptions.DBusException as ex: self.logger.debug(str(ex)) def get_adapter_by_addr(self, identifier): try: adapters = [ - BluezDbusAdapter(p["object"], self.bluez, self.bus, self.logger) + BluezDbusAdapter(p["object"], self.bluez, + self.bus, self.logger) for p in get_bluez_objects( self.bluez, self.bus, ADAPTER_INTERFACE, "/org/bluez" ) @@ -867,7 +881,8 @@ def get_adapter_by_addr(self, identifier): if str(adapter.Address).upper() == str(identifier).upper() or "/org/bluez/{}".format(identifier) == str(adapter.path): return adapter self.logger.info( - "adapter %s cannot be found, expect the ble mac address" % (identifier) + "adapter %s cannot be found, expect the 
ble mac address" % ( + identifier) ) return None @@ -905,7 +920,8 @@ def setInputHook(self, hookFunc): self.hookFuncPtr = hookFunctionType(hookFunc) pyos_inputhook_ptr = c_void_p.in_dll(pythonapi, "PyOS_InputHook") # save the original so that on del we can revert it back to the way it was. - self.orig_input_hook = cast(pyos_inputhook_ptr.value, PYFUNCTYPE(c_int)) + self.orig_input_hook = cast( + pyos_inputhook_ptr.value, PYFUNCTYPE(c_int)) # set the new hook. readLine will call this periodically as it polls for input. pyos_inputhook_ptr.value = cast(self.hookFuncPtr, c_void_p).value @@ -929,15 +945,21 @@ def dump_scan_result(self, device): devIdInfo = self.get_peripheral_devIdInfo(device) if devIdInfo != None: - self.logger.info("{0:<16}= {1}".format("Pairing State", devIdInfo.pairingState)) - self.logger.info("{0:<16}= {1}".format("Discriminator", devIdInfo.discriminator)) - self.logger.info("{0:<16}= {1}".format("Vendor Id", devIdInfo.vendorId)) - self.logger.info("{0:<16}= {1}".format("Product Id", devIdInfo.productId)) + self.logger.info("{0:<16}= {1}".format( + "Pairing State", devIdInfo.pairingState)) + self.logger.info("{0:<16}= {1}".format( + "Discriminator", devIdInfo.discriminator)) + self.logger.info("{0:<16}= {1}".format( + "Vendor Id", devIdInfo.vendorId)) + self.logger.info("{0:<16}= {1}".format( + "Product Id", devIdInfo.productId)) if device.ServiceData: for advuuid in device.ServiceData: - self.logger.info("{0:<16}= {1}".format("Adv UUID", str(advuuid))) - self.logger.info("{0:<16}= {1}".format("Adv Data", bytes(device.ServiceData[advuuid]).hex())) + self.logger.info("{0:<16}= {1}".format( + "Adv UUID", str(advuuid))) + self.logger.info("{0:<16}= {1}".format( + "Adv Data", bytes(device.ServiceData[advuuid]).hex())) else: self.logger.info("") self.logger.info("") diff --git a/src/controller/python/chip/ChipCommissionableNodeCtrl.py b/src/controller/python/chip/ChipCommissionableNodeCtrl.py index d41105c79b5764..83a069a7ecc67d 100644 --- a/src/controller/python/chip/ChipCommissionableNodeCtrl.py +++ b/src/controller/python/chip/ChipCommissionableNodeCtrl.py @@ -33,6 +33,7 @@ __all__ = ["ChipCommissionableNodeController"] + def _singleton(cls): instance = [None] @@ -43,6 +44,7 @@ def wrapper(*args, **kwargs): return wrapper + @_singleton class ChipCommissionableNodeController(object): def __init__(self, startNetworkThread=True): @@ -69,12 +71,14 @@ def __del__(self): def PrintDiscoveredCommissioners(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_CommissionableNodeController_PrintDiscoveredCommissioners(self.commissionableNodeCtrl) + lambda: self._dmLib.pychip_CommissionableNodeController_PrintDiscoveredCommissioners( + self.commissionableNodeCtrl) ) def DiscoverCommissioners(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_CommissionableNodeController_DiscoverCommissioners(self.commissionableNodeCtrl) + lambda: self._dmLib.pychip_CommissionableNodeController_DiscoverCommissioners( + self.commissionableNodeCtrl) ) # ----- Private Members ----- @@ -90,7 +94,9 @@ def _InitLib(self): c_void_p] self._dmLib.pychip_CommissionableNodeController_DeleteController.restype = c_uint32 - self._dmLib.pychip_CommissionableNodeController_DiscoverCommissioners.argtypes = [c_void_p] + self._dmLib.pychip_CommissionableNodeController_DiscoverCommissioners.argtypes = [ + c_void_p] self._dmLib.pychip_CommissionableNodeController_DiscoverCommissioners.restype = c_uint32 - self._dmLib.pychip_CommissionableNodeController_PrintDiscoveredCommissioners.argtypes = 
[c_void_p] \ No newline at end of file + self._dmLib.pychip_CommissionableNodeController_PrintDiscoveredCommissioners.argtypes = [ + c_void_p] diff --git a/src/controller/python/chip/ChipCoreBluetoothMgr.py b/src/controller/python/chip/ChipCoreBluetoothMgr.py index 7428aed4a8876e..394ec0f2b046e8 100644 --- a/src/controller/python/chip/ChipCoreBluetoothMgr.py +++ b/src/controller/python/chip/ChipCoreBluetoothMgr.py @@ -106,20 +106,22 @@ def _VoidPtrToCBUUID(ptr, len): return ptr + class LoopCondition: - def __init__(self, op, timelimit, arg = None): + def __init__(self, op, timelimit, arg=None): self.op = op self.due = time.time() + timelimit self.arg = arg - + def TimeLimitExceeded(self): return time.time() > self.due + class BlePeripheral: def __init__(self, peripheral, advData): self.peripheral = peripheral self.advData = dict(advData) - + def __eq__(self, another): return self.peripheral == another.peripheral @@ -136,6 +138,7 @@ def getPeripheralDevIdInfo(self): return ParseServiceData(bytes(servDataDict[i])) return None + class CoreBluetoothManager(ChipBleBase): def __init__(self, devCtrl, logger=None): if logger: @@ -161,9 +164,11 @@ def __init__(self, devCtrl, logger=None): self.ready_condition = False self.loop_condition = ( - False # indicates whether the cmd requirement has been met in the runloop. + # indicates whether the cmd requirement has been met in the runloop. + False ) - self.connect_state = False # reflects whether or not there is a connection. + # reflects whether or not there is a connection. + self.connect_state = False self.send_condition = False self.subscribe_condition = False @@ -217,11 +222,12 @@ def setInputHook(self, hookFunc): self.hookFuncPtr = hookFunctionType(hookFunc) pyos_inputhook_ptr = c_void_p.in_dll(pythonapi, "PyOS_InputHook") # save the original so that on del we can revert it back to the way it was. - self.orig_input_hook = cast(pyos_inputhook_ptr.value, PYFUNCTYPE(c_int)) + self.orig_input_hook = cast( + pyos_inputhook_ptr.value, PYFUNCTYPE(c_int)) # set the new hook. readLine will call this periodically as it polls for input. pyos_inputhook_ptr.value = cast(self.hookFuncPtr, c_void_p).value - def shouldLoop(self, cond:LoopCondition): + def shouldLoop(self, cond: LoopCondition): """ Used by runLoopUntil to determine whether it should exit the runloop. 
""" if cond.TimeLimitExceeded(): @@ -249,7 +255,7 @@ def shouldLoop(self, cond:LoopCondition): return True - def runLoopUntil(self, cond:LoopCondition): + def runLoopUntil(self, cond: LoopCondition): """Helper function to drive OSX runloop until an expected event is received or the timeout expires.""" runLoop = NSRunLoop.currentRunLoop() @@ -277,7 +283,8 @@ def centralManager_didDiscoverPeripheral_advertisementData_RSSI_( self.logger.info("adding to scan list:") self.logger.info("") self.logger.info( - "{0:<16}= {1:<80}".format("Name", str(peripheral._.name)) + "{0:<16}= {1:<80}".format( + "Name", str(peripheral._.name)) ) self.logger.info( "{0:<16}= {1:<80}".format( @@ -285,17 +292,23 @@ def centralManager_didDiscoverPeripheral_advertisementData_RSSI_( ) ) self.logger.info("{0:<16}= {1:<80}".format("RSSI", rssi)) - devIdInfo = BlePeripheral(peripheral, data).getPeripheralDevIdInfo() + devIdInfo = BlePeripheral( + peripheral, data).getPeripheralDevIdInfo() if devIdInfo: - self.logger.info("{0:<16}= {1}".format("Pairing State", devIdInfo.pairingState)) - self.logger.info("{0:<16}= {1}".format("Discriminator", devIdInfo.discriminator)) - self.logger.info("{0:<16}= {1}".format("Vendor Id", devIdInfo.vendorId)) - self.logger.info("{0:<16}= {1}".format("Product Id", devIdInfo.productId)) + self.logger.info("{0:<16}= {1}".format( + "Pairing State", devIdInfo.pairingState)) + self.logger.info("{0:<16}= {1}".format( + "Discriminator", devIdInfo.discriminator)) + self.logger.info("{0:<16}= {1}".format( + "Vendor Id", devIdInfo.vendorId)) + self.logger.info("{0:<16}= {1}".format( + "Product Id", devIdInfo.productId)) self.logger.info("ADV data: " + repr(data)) self.logger.info("") self.peripheral_list.append(peripheral) - self.peripheral_adv_list.append(BlePeripheral(peripheral, data)) + self.peripheral_adv_list.append( + BlePeripheral(peripheral, data)) else: if (peripheral._.name == self.bg_peripheral_name) or (str(devIdInfo.discriminator) == self.bg_peripheral_name): if len(self.peripheral_list) == 0: @@ -372,7 +385,8 @@ def peripheral_didDiscoverCharacteristicsForService_error_( self.connect_state = True else: - self.logger.error("ERROR: failed to discover characteristics for service.") + self.logger.error( + "ERROR: failed to discover characteristics for service.") self.connect_state = False self.loop_condition = True @@ -461,7 +475,8 @@ def GetBleEvent(self): eventStruct = BleSubscribeEventStruct.fromBleSubscribeEvent(ev) return cast(pointer(eventStruct), c_void_p).value elif isinstance(ev, BleDisconnectEvent): - eventStruct = BleDisconnectEventStruct.fromBleDisconnectEvent(ev) + eventStruct = BleDisconnectEventStruct.fromBleDisconnectEvent( + ev) return cast(pointer(eventStruct), c_void_p).value return None diff --git a/src/controller/python/chip/ChipDeviceCtrl.py b/src/controller/python/chip/ChipDeviceCtrl.py index 0e9dc000d752a4..8e37ab0e78aa0a 100644 --- a/src/controller/python/chip/ChipDeviceCtrl.py +++ b/src/controller/python/chip/ChipDeviceCtrl.py @@ -40,7 +40,8 @@ __all__ = ["ChipDeviceController"] _DevicePairingDelegate_OnPairingCompleteFunct = CFUNCTYPE(None, c_uint32) -_DevicePairingDelegate_OnCommissioningCompleteFunct = CFUNCTYPE(None, c_uint64, c_uint32) +_DevicePairingDelegate_OnCommissioningCompleteFunct = CFUNCTYPE( + None, c_uint64, c_uint32) _DeviceAddressUpdateDelegate_OnUpdateComplete = CFUNCTYPE( None, c_uint64, c_uint32) # void (*)(Device *, CHIP_ERROR). 
@@ -72,6 +73,7 @@ class DCState(enum.IntEnum): RENDEZVOUS_ONGOING = 3 RENDEZVOUS_CONNECTED = 4 + @_singleton class ChipDeviceController(object): def __init__(self, startNetworkThread=True, controllerNodeId=0, bluetoothAdapter=None): @@ -178,12 +180,14 @@ def ConnectBLE(self, discriminator, setupPinCode, nodeid): def CloseBLEConnection(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceCommissioner_CloseBleConnection(self.devCtrl) + lambda: self._dmLib.pychip_DeviceCommissioner_CloseBleConnection( + self.devCtrl) ) def CloseSession(self, nodeid): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_CloseSession(self.devCtrl, nodeid) + lambda: self._dmLib.pychip_DeviceController_CloseSession( + self.devCtrl, nodeid) ) def ConnectIP(self, ipaddr, setupPinCode, nodeid): @@ -208,55 +212,66 @@ def GetAddressAndPort(self, nodeid): ) return (address.value.decode(), port.value) if error == 0 else None + def DiscoverCommissionableNodesLongDiscriminator(self, long_discriminator): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesLongDiscriminator(self.devCtrl, long_discriminator) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesLongDiscriminator( + self.devCtrl, long_discriminator) ) def DiscoverCommissionableNodesShortDiscriminator(self, short_discriminator): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesShortDiscriminator(self.devCtrl, short_discriminator) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesShortDiscriminator( + self.devCtrl, short_discriminator) ) - + def DiscoverCommissionableNodesVendor(self, vendor): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesVendor(self.devCtrl, vendor) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesVendor( + self.devCtrl, vendor) ) def DiscoverCommissionableNodesDeviceType(self, device_type): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesDeviceType(self.devCtrl, device_type) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesDeviceType( + self.devCtrl, device_type) ) def DiscoverCommissionableNodesCommissioningEnabled(self, enabled): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabled(self.devCtrl, enabled) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabled( + self.devCtrl, enabled) ) def DiscoverCommissionableNodesCommissioningEnabledFromCommand(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabledFromCommand(self.devCtrl) + lambda: self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabledFromCommand( + self.devCtrl) ) def PrintDiscoveredDevices(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_PrintDiscoveredDevices(self.devCtrl) + lambda: self._dmLib.pychip_DeviceController_PrintDiscoveredDevices( + self.devCtrl) ) def ParseQRCode(self, qrCode, output): print(output) return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_ParseQRCode(qrCode, output) + lambda: self._dmLib.pychip_DeviceController_ParseQRCode( + qrCode, output) ) def GetIPForDiscoveredDevice(self, idx, addrStr, length): return self._ChipStack.Call( - lambda: 
self._dmLib.pychip_DeviceController_GetIPForDiscoveredDevice(self.devCtrl, idx, addrStr, length) + lambda: self._dmLib.pychip_DeviceController_GetIPForDiscoveredDevice( + self.devCtrl, idx, addrStr, length) ) def DiscoverAllCommissioning(self): return self._ChipStack.Call( - lambda: self._dmLib.pychip_DeviceController_DiscoverAllCommissionableNodes(self.devCtrl) + lambda: self._dmLib.pychip_DeviceController_DiscoverAllCommissionableNodes( + self.devCtrl) ) def GetFabricId(self): @@ -284,7 +299,8 @@ def ZCLSend(self, cluster, command, nodeid, endpoint, groupid, args, blocking=Fa if res != 0: raise self._ChipStack.ErrorToException(res) im.ClearCommandStatus(im.PLACEHOLDER_COMMAND_HANDLE) - self._Cluster.SendCommand(device, cluster, command, endpoint, groupid, args, True) + self._Cluster.SendCommand( + device, cluster, command, endpoint, groupid, args, True) if blocking: # We only send 1 command by this function, so index is always 0 return im.WaitCommandIndexStatus(im.PLACEHOLDER_COMMAND_HANDLE, 1) @@ -300,7 +316,8 @@ def ZCLReadAttribute(self, cluster, attribute, nodeid, endpoint, groupid, blocki raise self._ChipStack.ErrorToException(res) # We are not using IM for Attributes. - res = self._Cluster.ReadAttribute(device, cluster, attribute, endpoint, groupid, False) + res = self._Cluster.ReadAttribute( + device, cluster, attribute, endpoint, groupid, False) if blocking: return im.GetAttributeReadResponse(im.DEFAULT_ATTRIBUTEREAD_APPID) @@ -375,36 +392,47 @@ def _InitLib(self): self._dmLib.pychip_DeviceController_ConnectIP.argtypes = [ c_void_p, c_char_p, c_uint32, c_uint64] - self._dmLib.pychip_DeviceController_DiscoverAllCommissionableNodes.argtypes = [c_void_p] + self._dmLib.pychip_DeviceController_DiscoverAllCommissionableNodes.argtypes = [ + c_void_p] self._dmLib.pychip_DeviceController_DiscoverAllCommissionableNodes.restype = c_uint32 - self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesLongDiscriminator.argtypes = [c_void_p, c_uint16] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesLongDiscriminator.argtypes = [ + c_void_p, c_uint16] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesLongDiscriminator.restype = c_uint32 - self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesShortDiscriminator.argtypes = [c_void_p, c_uint16] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesShortDiscriminator.argtypes = [ + c_void_p, c_uint16] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesShortDiscriminator.restype = c_uint32 - self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesVendor.argtypes = [c_void_p, c_uint16] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesVendor.argtypes = [ + c_void_p, c_uint16] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesVendor.restype = c_uint32 - self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesDeviceType.argtypes = [c_void_p, c_uint16] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesDeviceType.argtypes = [ + c_void_p, c_uint16] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesDeviceType.restype = c_uint32 - self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabled.argtypes = [c_void_p, c_uint16] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabled.argtypes = [ + c_void_p, c_uint16] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabled.restype = c_uint32 - 
self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabledFromCommand.argtypes = [c_void_p] + self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabledFromCommand.argtypes = [ + c_void_p] self._dmLib.pychip_DeviceController_DiscoverCommissionableNodesCommissioningEnabledFromCommand.restype = c_uint32 - self._dmLib.pychip_DeviceController_PrintDiscoveredDevices.argtypes = [c_void_p] + self._dmLib.pychip_DeviceController_PrintDiscoveredDevices.argtypes = [ + c_void_p] - self._dmLib.pychip_DeviceController_GetIPForDiscoveredDevice.argtypes = [c_void_p, c_int, c_char_p, c_uint32] + self._dmLib.pychip_DeviceController_GetIPForDiscoveredDevice.argtypes = [ + c_void_p, c_int, c_char_p, c_uint32] self._dmLib.pychip_DeviceController_GetIPForDiscoveredDevice.restype = c_bool - self._dmLib.pychip_DeviceController_ConnectIP.argtypes = [c_void_p, c_char_p, c_uint32, c_uint64] + self._dmLib.pychip_DeviceController_ConnectIP.argtypes = [ + c_void_p, c_char_p, c_uint32, c_uint64] self._dmLib.pychip_DeviceController_ConnectIP.restype = c_uint32 - self._dmLib.pychip_DeviceController_CloseSession.argtypes = [c_void_p, c_uint64] + self._dmLib.pychip_DeviceController_CloseSession.argtypes = [ + c_void_p, c_uint64] self._dmLib.pychip_DeviceController_CloseSession.restype = c_uint32 self._dmLib.pychip_DeviceController_GetAddressAndPort.argtypes = [ @@ -435,11 +463,13 @@ def _InitLib(self): c_void_p, c_uint64, _DeviceAvailableFunct] self._dmLib.pychip_GetDeviceByNodeId.restype = c_uint32 - self._dmLib.pychip_DeviceCommissioner_CloseBleConnection.argtypes = [c_void_p] + self._dmLib.pychip_DeviceCommissioner_CloseBleConnection.argtypes = [ + c_void_p] self._dmLib.pychip_DeviceCommissioner_CloseBleConnection.restype = c_uint32 self._dmLib.pychip_GetCommandSenderHandle.argtypes = [c_void_p] self._dmLib.pychip_GetCommandSenderHandle.restype = c_uint64 - self._dmLib.pychip_DeviceController_GetFabricId.argtypes = [c_void_p, POINTER(c_uint64)] + self._dmLib.pychip_DeviceController_GetFabricId.argtypes = [ + c_void_p, POINTER(c_uint64)] self._dmLib.pychip_DeviceController_GetFabricId.restype = c_uint32 diff --git a/src/controller/python/chip/ChipStack.py b/src/controller/python/chip/ChipStack.py index 7ed3be0253d7a1..dc3930cf046ad5 100644 --- a/src/controller/python/chip/ChipStack.py +++ b/src/controller/python/chip/ChipStack.py @@ -47,6 +47,7 @@ ChipStackDLLBaseName = "_ChipDeviceCtrl.so" + def _singleton(cls): instance = [None] @@ -119,6 +120,7 @@ def formatTime(self, record, datefmt=None): timestampStr = "%s.%03ld" % (timestampStr, timestampUS / 1000) return timestampStr + class AsyncCallableHandle: def __init__(self, callback): self._callback = callback @@ -146,9 +148,12 @@ def Wait(self): raise self._exc return self._res + _CompleteFunct = CFUNCTYPE(None, c_void_p, c_void_p) -_ErrorFunct = CFUNCTYPE(None, c_void_p, c_void_p, c_ulong, POINTER(DeviceStatusStruct)) -_LogMessageFunct = CFUNCTYPE(None, c_int64, c_int64, c_char_p, c_uint8, c_char_p) +_ErrorFunct = CFUNCTYPE(None, c_void_p, c_void_p, + c_ulong, POINTER(DeviceStatusStruct)) +_LogMessageFunct = CFUNCTYPE( + None, c_int64, c_int64, c_char_p, c_uint8, c_char_p) _ChipThreadTaskRunnerFunct = CFUNCTYPE(None, py_object) @@ -226,14 +231,16 @@ def HandleChipThreadRun(callback): self.cbHandleChipThreadRun = HandleChipThreadRun self.cbHandleComplete = _CompleteFunct(HandleComplete) self.cbHandleError = _ErrorFunct(HandleError) - self.blockingCB = None # set by other modules(BLE) that require service by thread while 
thread blocks. + # set by other modules(BLE) that require service by thread while thread blocks. + self.blockingCB = None # Initialize the chip library res = self._ChipStackLib.pychip_Stack_Init() if res != 0: raise self.ErrorToException(res) - res = self._ChipStackLib.pychip_BLEMgrImpl_ConfigureBle(bluetoothAdapter) + res = self._ChipStackLib.pychip_BLEMgrImpl_ConfigureBle( + bluetoothAdapter) if res != 0: raise self.ErrorToException(res) @@ -333,7 +340,8 @@ def PostTaskOnChipThread(self, callFunct) -> AsyncCallableHandle: ''' callObj = AsyncCallableHandle(callFunct) pythonapi.Py_IncRef(py_object(callObj)) - res = self._ChipStackLib.pychip_DeviceController_PostTaskOnChipThread(self.cbHandleChipThreadRun, py_object(callObj)) + res = self._ChipStackLib.pychip_DeviceController_PostTaskOnChipThread( + self.cbHandleChipThreadRun, py_object(callObj)) if res != 0: pythonapi.Py_DecRef(py_object(callObj)) raise self.ErrorToException(res) @@ -350,7 +358,8 @@ def ErrorToException(self, err, devStatusPtr=None): ) ) sysErrorCode = ( - devStatus.SysErrorCode if (devStatus.SysErrorCode != 0) else None + devStatus.SysErrorCode if ( + devStatus.SysErrorCode != 0) else None ) if sysErrorCode != None: msg = msg + " (system err %d)" % (sysErrorCode) @@ -383,7 +392,8 @@ def LocateChipDLL(self): # running script looking for an CHIP build directory containing the Chip Device # Manager DLL. This makes it possible to import and use the ChipDeviceMgr module # directly from a built copy of the CHIP source tree. - buildMachineGlob = "%s-*-%s*" % (platform.machine(), platform.system().lower()) + buildMachineGlob = "%s-*-%s*" % (platform.machine(), + platform.system().lower()) relDMDLLPathGlob = os.path.join( "build", buildMachineGlob, @@ -426,11 +436,14 @@ def _loadLib(self): self._ChipStackLib.pychip_Stack_StatusReportToString.restype = c_char_p self._ChipStackLib.pychip_Stack_ErrorToString.argtypes = [c_uint32] self._ChipStackLib.pychip_Stack_ErrorToString.restype = c_char_p - self._ChipStackLib.pychip_Stack_SetLogFunct.argtypes = [_LogMessageFunct] + self._ChipStackLib.pychip_Stack_SetLogFunct.argtypes = [ + _LogMessageFunct] self._ChipStackLib.pychip_Stack_SetLogFunct.restype = c_uint32 - self._ChipStackLib.pychip_BLEMgrImpl_ConfigureBle.argtypes = [c_uint32] + self._ChipStackLib.pychip_BLEMgrImpl_ConfigureBle.argtypes = [ + c_uint32] self._ChipStackLib.pychip_BLEMgrImpl_ConfigureBle.restype = c_uint32 - self._ChipStackLib.pychip_DeviceController_PostTaskOnChipThread.argtypes = [_ChipThreadTaskRunnerFunct, py_object] + self._ChipStackLib.pychip_DeviceController_PostTaskOnChipThread.argtypes = [ + _ChipThreadTaskRunnerFunct, py_object] self._ChipStackLib.pychip_DeviceController_PostTaskOnChipThread.restype = c_uint32 diff --git a/src/controller/python/chip/__init__.py b/src/controller/python/chip/__init__.py index 7fb82dc1066687..741ceaf6bc4068 100644 --- a/src/controller/python/chip/__init__.py +++ b/src/controller/python/chip/__init__.py @@ -21,4 +21,4 @@ # Provides Python APIs for CHIP. 
# -"""Provides Python APIs for CHIP.""" \ No newline at end of file +"""Provides Python APIs for CHIP.""" diff --git a/src/controller/python/chip/ble/commissioning/__init__.py b/src/controller/python/chip/ble/commissioning/__init__.py index d8670cbe8dddc7..efc367c4081076 100644 --- a/src/controller/python/chip/ble/commissioning/__init__.py +++ b/src/controller/python/chip/ble/commissioning/__init__.py @@ -22,10 +22,12 @@ TEST_NODE_ID = 11223344 + class PairNotificationType(Enum): - NETWORK_CREDENTIALS = 0 - OPERATIONAL_CREDENTIALS = 1 - COMPLETE = 2 + NETWORK_CREDENTIALS = 0 + OPERATIONAL_CREDENTIALS = 1 + COMPLETE = 2 + @dataclass class _PairNotification: @@ -33,6 +35,7 @@ class _PairNotification: csr: Optional[bytes] error_code: Optional[int] + class Connection: def __init__(self, controller): self.needsNetworkCredentials = False @@ -45,18 +48,16 @@ def __init__(self, controller): def ConnectToWifi(self, ssid: str, password: str): if not self.needsNetworkCredentials: raise Exception("Not requiring network credentials yet.") - + self._controller.PairSendWifiCredentials(ssid, password) self._WaitForPairProgress() def ConnectToThread(self, blob: bytes): if not self.needsNetworkCredentials: raise Exception("Not requiring network credentials yet.") - + self._controller.PairSendThreadCredentials(blob) self._WaitForPairProgress() - - def _Pair(self, discriminator: int, pin: int, deprecated_nodeid: int): """Sets up controller callbakcs and initiates BLE pairing.""" @@ -67,15 +68,17 @@ def _Pair(self, discriminator: int, pin: int, deprecated_nodeid: int): self._controller.BlePair(deprecated_nodeid, pin, discriminator) - def _OnNetworkCredentialsRequested(self): - self._pair_queue.put(_PairNotification(PairNotificationType.NETWORK_CREDENTIALS, None, None)) + self._pair_queue.put(_PairNotification( + PairNotificationType.NETWORK_CREDENTIALS, None, None)) def _OnOperationalCredentialsRequested(self, csr): - self._pair_queue.put(_PairNotification(PairNotificationType.OPERATIONAL_CREDENTIALS, csr, None)) + self._pair_queue.put(_PairNotification( + PairNotificationType.OPERATIONAL_CREDENTIALS, csr, None)) def _OnPairingComplete(self, err): - self._pair_queue.put(_PairNotification(PairNotificationType.COMPLETE, None, err)) + self._pair_queue.put(_PairNotification( + PairNotificationType.COMPLETE, None, err)) def _WaitForPairProgress(self): """Waits for some pairing callback progress. 
@@ -94,8 +97,9 @@ def _WaitForPairProgress(self): self.paired = step.type == PairNotificationType.COMPLETE if step.type == PairNotificationType.COMPLETE: - if step.error_code != 0: - raise Exception('Pairing ended with error code %d' % step.error_code) + if step.error_code != 0: + raise Exception('Pairing ended with error code %d' % + step.error_code) def _StartAsyncConnection(discriminator: int, pin: int, deprecated_nodeid: Optional[int] = None) -> Connection: @@ -117,7 +121,7 @@ def _StartAsyncConnection(discriminator: int, pin: int, deprecated_nodeid: Optio controller = GetCommissioner() if controller.pairing_state != PairingState.INITIALIZED: - raise Exception("Controller is not ready to start a new pairing") + raise Exception("Controller is not ready to start a new pairing") connection = Connection(controller) connection._Pair(discriminator, pin, deprecated_nodeid) @@ -142,9 +146,8 @@ def Connect(discriminator: int, pin: int, deprecated_nodeid: Optional[int] = Non """ connection = _StartAsyncConnection(discriminator, pin, deprecated_nodeid) connection._WaitForPairProgress() - - return connection + return connection __all__ = [ diff --git a/src/controller/python/chip/ble/get_adapters.py b/src/controller/python/chip/ble/get_adapters.py index 9eb7d26f571060..0f3be727806559 100644 --- a/src/controller/python/chip/ble/get_adapters.py +++ b/src/controller/python/chip/ble/get_adapters.py @@ -5,37 +5,38 @@ @dataclass class AdapterInfo: - index: int - address: str - name: str - alias: str - powered_on: bool + index: int + address: str + name: str + alias: str + powered_on: bool def GetAdapters() -> List[AdapterInfo]: - """Get a list of BLE adapters available on the system. """ - handle = _GetBleLibraryHandle() - - result = [] - nativeList = handle.pychip_ble_adapter_list_new() - if nativeList == 0: - raise Exception('Failed to get BLE adapter list') - - try: - while handle.pychip_ble_adapter_list_next(nativeList): - result.append( - AdapterInfo( - index=handle.pychip_ble_adapter_list_get_index(nativeList), - address=handle.pychip_ble_adapter_list_get_address( - nativeList).decode('utf8'), - name=handle.pychip_ble_adapter_list_get_name(nativeList).decode( - 'utf8'), - alias=handle.pychip_ble_adapter_list_get_alias(nativeList).decode( - 'utf8'), - powered_on=handle.pychip_ble_adapter_list_is_powered(nativeList), - )) - - finally: - handle.pychip_ble_adapter_list_delete(nativeList) - - return result \ No newline at end of file + """Get a list of BLE adapters available on the system. 
""" + handle = _GetBleLibraryHandle() + + result = [] + nativeList = handle.pychip_ble_adapter_list_new() + if nativeList == 0: + raise Exception('Failed to get BLE adapter list') + + try: + while handle.pychip_ble_adapter_list_next(nativeList): + result.append( + AdapterInfo( + index=handle.pychip_ble_adapter_list_get_index(nativeList), + address=handle.pychip_ble_adapter_list_get_address( + nativeList).decode('utf8'), + name=handle.pychip_ble_adapter_list_get_name(nativeList).decode( + 'utf8'), + alias=handle.pychip_ble_adapter_list_get_alias(nativeList).decode( + 'utf8'), + powered_on=handle.pychip_ble_adapter_list_is_powered( + nativeList), + )) + + finally: + handle.pychip_ble_adapter_list_delete(nativeList) + + return result diff --git a/src/controller/python/chip/ble/library_handle.py b/src/controller/python/chip/ble/library_handle.py index b0fad354eb0652..d2469716adb3b5 100644 --- a/src/controller/python/chip/ble/library_handle.py +++ b/src/controller/python/chip/ble/library_handle.py @@ -20,40 +20,44 @@ from chip.ble.types import DeviceScannedCallback, ScanDoneCallback -# This prevents python auto-casting c_void_p to integers and +# This prevents python auto-casting c_void_p to integers and # auto-casting 32/64 bit values to int/long respectively. Without this -# passing in c_void_p does not see to work well for numbers +# passing in c_void_p does not see to work well for numbers # in [0x80000000; 0xFFFFFFFF] (argument will be auto-cast to 64-bit negative) class VoidPointer(c_void_p): pass def _GetBleLibraryHandle() -> ctypes.CDLL: - """ Get the native library handle with BLE method initialization. - - Retreives the CHIP native library handle and attaches signatures to - native methods. - """ - - handle = chip.native.GetLibraryHandle() - - # Uses one of the type decorators as an indicator for everything being - # initialized. Native methods default to c_int return types - if handle.pychip_ble_adapter_list_new.restype != VoidPointer: - setter = chip.native.NativeLibraryHandleMethodArguments(handle) - - setter.Set('pychip_ble_adapter_list_new', VoidPointer, []) - setter.Set('pychip_ble_adapter_list_next', c_bool, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_get_index', c_uint32, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_get_address', c_char_p, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_get_alias', c_char_p, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_get_name', c_char_p, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_is_powered', c_bool, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_delete', None, [VoidPointer]) - setter.Set('pychip_ble_adapter_list_get_raw_adapter', VoidPointer, [VoidPointer]) - - setter.Set('pychip_ble_start_scanning', VoidPointer, [ - py_object, VoidPointer, c_uint32, DeviceScannedCallback, ScanDoneCallback - ]) - - return handle + """ Get the native library handle with BLE method initialization. + + Retreives the CHIP native library handle and attaches signatures to + native methods. + """ + + handle = chip.native.GetLibraryHandle() + + # Uses one of the type decorators as an indicator for everything being + # initialized. 
Native methods default to c_int return types + if handle.pychip_ble_adapter_list_new.restype != VoidPointer: + setter = chip.native.NativeLibraryHandleMethodArguments(handle) + + setter.Set('pychip_ble_adapter_list_new', VoidPointer, []) + setter.Set('pychip_ble_adapter_list_next', c_bool, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_get_index', + c_uint32, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_get_address', + c_char_p, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_get_alias', + c_char_p, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_get_name', c_char_p, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_is_powered', c_bool, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_delete', None, [VoidPointer]) + setter.Set('pychip_ble_adapter_list_get_raw_adapter', + VoidPointer, [VoidPointer]) + + setter.Set('pychip_ble_start_scanning', VoidPointer, [ + py_object, VoidPointer, c_uint32, DeviceScannedCallback, ScanDoneCallback + ]) + + return handle diff --git a/src/controller/python/chip/ble/scan_devices.py b/src/controller/python/chip/ble/scan_devices.py index 23126ff60f1dc1..78784e007a9e9c 100644 --- a/src/controller/python/chip/ble/scan_devices.py +++ b/src/controller/python/chip/ble/scan_devices.py @@ -21,68 +21,70 @@ from queue import Queue from chip.ble.types import DeviceScannedCallback, ScanDoneCallback + @DeviceScannedCallback def ScanFoundCallback(closure, address: str, discriminator: int, vendor: int, product: int): - closure.DeviceFound(address, discriminator, vendor, product) + closure.DeviceFound(address, discriminator, vendor, product) @ScanDoneCallback def ScanDoneCallback(closure): - closure.ScanCompleted() + closure.ScanCompleted() def DiscoverAsync(timeoutMs: int, scanCallback, doneCallback, adapter=None): - """Initiate a BLE discovery of devices with the given timeout. - - NOTE: devices are not guaranteed to be unique. New entries are returned - as soon as the underlying BLE manager detects changes. - - Args: - timeoutMs: scan will complete after this time - scanCallback: callback when a device is found - doneCallback: callback when the scan is complete - adapter: what adapter to choose. Either an AdapterInfo object or - a string with the adapter address. If None, the first - adapter on the system is used. - """ - if adapter and not isinstance(adapter, str): - adapter = adapter.address + """Initiate a BLE discovery of devices with the given timeout. + + NOTE: devices are not guaranteed to be unique. New entries are returned + as soon as the underlying BLE manager detects changes. + + Args: + timeoutMs: scan will complete after this time + scanCallback: callback when a device is found + doneCallback: callback when the scan is complete + adapter: what adapter to choose. Either an AdapterInfo object or + a string with the adapter address. If None, the first + adapter on the system is used. 
+ """ + if adapter and not isinstance(adapter, str): + adapter = adapter.address - handle = _GetBleLibraryHandle() + handle = _GetBleLibraryHandle() - nativeList = handle.pychip_ble_adapter_list_new() - if nativeList == 0: - raise Exception('Failed to list available adapters') + nativeList = handle.pychip_ble_adapter_list_new() + if nativeList == 0: + raise Exception('Failed to list available adapters') - try: - while handle.pychip_ble_adapter_list_next(nativeList): - if adapter and (adapter != handle.pychip_ble_adapter_list_get_address( - nativeList).decode('utf8')): - continue + try: + while handle.pychip_ble_adapter_list_next(nativeList): + if adapter and (adapter != handle.pychip_ble_adapter_list_get_address( + nativeList).decode('utf8')): + continue - class ScannerClosure: + class ScannerClosure: - def DeviceFound(self, *args): - scanCallback(*args) + def DeviceFound(self, *args): + scanCallback(*args) - def ScanCompleted(self, *args): - doneCallback(*args) - ctypes.pythonapi.Py_DecRef(ctypes.py_object(self)) + def ScanCompleted(self, *args): + doneCallback(*args) + ctypes.pythonapi.Py_DecRef(ctypes.py_object(self)) - closure = ScannerClosure() - ctypes.pythonapi.Py_IncRef(ctypes.py_object(closure)) + closure = ScannerClosure() + ctypes.pythonapi.Py_IncRef(ctypes.py_object(closure)) - scanner = handle.pychip_ble_start_scanning( - ctypes.py_object(closure), - handle.pychip_ble_adapter_list_get_raw_adapter(nativeList), timeoutMs, - ScanFoundCallback, ScanDoneCallback) + scanner = handle.pychip_ble_start_scanning( + ctypes.py_object(closure), + handle.pychip_ble_adapter_list_get_raw_adapter( + nativeList), timeoutMs, + ScanFoundCallback, ScanDoneCallback) - if scanner == 0: - raise Exception('Failed to initiate scan') - break - finally: - handle.pychip_ble_adapter_list_delete(nativeList) + if scanner == 0: + raise Exception('Failed to initiate scan') + break + finally: + handle.pychip_ble_adapter_list_delete(nativeList) @dataclass @@ -92,6 +94,7 @@ class DeviceInfo: vendor: int product: int + class _DeviceInfoReceiver: """Uses a queue to notify of objects received asynchronously from a ble scan. @@ -99,6 +102,7 @@ class _DeviceInfoReceiver: Internal queue gets filled on DeviceFound and ends with None when ScanCompleted. """ + def __init__(self): self.queue = Queue() @@ -109,27 +113,27 @@ def ScanCompleted(self): self.queue.put(None) +def DiscoverSync(timeoutMs: int, adapter=None) -> Generator[DeviceInfo, None, None]: + """Discover BLE devices over the specified period of time. -def DiscoverSync(timeoutMs: int, adapter = None) -> Generator[DeviceInfo, None, None]: - """Discover BLE devices over the specified period of time. + NOTE: devices are not guaranteed to be unique. New entries are returned + as soon as the underlying BLE manager detects changes. - NOTE: devices are not guaranteed to be unique. New entries are returned - as soon as the underlying BLE manager detects changes. - - Args: - timeoutMs: scan will complete after this time - scanCallback: callback when a device is found - doneCallback: callback when the scan is complete - adapter: what adapter to choose. Either an AdapterInfo object or - a string with the adapter address. If None, the first - adapter on the system is used. - """ + Args: + timeoutMs: scan will complete after this time + scanCallback: callback when a device is found + doneCallback: callback when the scan is complete + adapter: what adapter to choose. Either an AdapterInfo object or + a string with the adapter address. 
If None, the first + adapter on the system is used. + """ - receiver = _DeviceInfoReceiver() - DiscoverAsync(timeoutMs, receiver.DeviceFound, receiver.ScanCompleted, adapter) + receiver = _DeviceInfoReceiver() + DiscoverAsync(timeoutMs, receiver.DeviceFound, + receiver.ScanCompleted, adapter) - while True: - data = receiver.queue.get() - if not data: - break - yield data + while True: + data = receiver.queue.get() + if not data: + break + yield data diff --git a/src/controller/python/chip/ble/types.py b/src/controller/python/chip/ble/types.py index bc0e11df45a2af..e1e667608e7f39 100644 --- a/src/controller/python/chip/ble/types.py +++ b/src/controller/python/chip/ble/types.py @@ -17,6 +17,6 @@ from ctypes import CFUNCTYPE, py_object, c_char_p, c_uint16 DeviceScannedCallback = CFUNCTYPE(None, py_object, c_char_p, c_uint16, - c_uint16, c_uint16) + c_uint16, c_uint16) ScanDoneCallback = CFUNCTYPE(None, py_object) diff --git a/src/controller/python/chip/discovery/__init__.py b/src/controller/python/chip/discovery/__init__.py index 05bbb0404ea823..e41360414bee8b 100644 --- a/src/controller/python/chip/discovery/__init__.py +++ b/src/controller/python/chip/discovery/__init__.py @@ -24,126 +24,131 @@ from chip.discovery.library_handle import _GetDiscoveryLibraryHandle from chip.discovery.types import DiscoverSuccessCallback_t, DiscoverFailureCallback_t + @dataclass(unsafe_hash=True) class PeerId: - """Represents a remote peer id.""" - fabricId: int - nodeId: int + """Represents a remote peer id.""" + fabricId: int + nodeId: int + @dataclass(unsafe_hash=True) class NodeAddress: - """Represents a distinct address where a node can be reached.""" - interface: int - ip: str - port: int + """Represents a distinct address where a node can be reached.""" + interface: int + ip: str + port: int + @dataclass class AggregatedDiscoveryResults: - """Discovery results for a node.""" - peerId: PeerId - addresses: Set[NodeAddress] + """Discovery results for a node.""" + peerId: PeerId + addresses: Set[NodeAddress] @dataclass class PendingDiscovery: - """Accumulator for ongoing discovery.""" - result: AggregatedDiscoveryResults - callback: Callable[[AggregatedDiscoveryResults], None] - expireTime: int - firstResultTime: int + """Accumulator for ongoing discovery.""" + result: AggregatedDiscoveryResults + callback: Callable[[AggregatedDiscoveryResults], None] + expireTime: int + firstResultTime: int + # Milliseconds to wait for additional results onece a single result has # been received _RESULT_WAIT_TIME_SEC = 0.05 -class _PendingDiscoveries: - """Manages a list of pending discoveries and associated callbacks.""" - - activeDiscoveries: List[PendingDiscovery] = [] - - def __init__(self): - self.operationCondition = threading.Condition() - self.resolution = threading.Thread(target=self.ResolutionThread, daemon=True) - self.resolution.start() - - def Start(self, peerId: PeerId, callback: Callable[[AggregatedDiscoveryResults], None], timeoutMs: int): - """Add a new pending result item to the internal list.""" - with self.operationCondition: - self.activeDiscoveries.append( - PendingDiscovery( - AggregatedDiscoveryResults(peerId, addresses = set()), - callback = callback, - expireTime = time.time() + timeoutMs/1000.0, - firstResultTime = 0, - )) - self.operationCondition.notify() - - def OnSuccess(self, peerId: PeerId, address: NodeAddress): - """Notify of a succesful address resolution.""" - with self.operationCondition: - for item in self.activeDiscoveries: - if item.result.peerId != peerId: - continue - - 
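
DiscoverSync in the hunk above bridges the callback API to a generator through a Queue with a None sentinel. One nit: its `if not data: break` would also stop on any falsy payload, so the sketch below uses the safer `is None` test; produce() and consume() are stand-ins for the scan callbacks and the generator loop:

from queue import Queue
from threading import Thread

def produce(q):
    for device in ("device-a", "device-b"):
        q.put(device)        # plays the role of DeviceFound
    q.put(None)              # ScanCompleted sentinel

def consume(q):
    while True:
        item = q.get()
        if item is None:     # safer than `if not item` for falsy payloads
            return
        yield item

q = Queue()
Thread(target=produce, args=(q,)).start()
assert list(consume(q)) == ["device-a", "device-b"]
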
item.result.addresses.add(address) - if item.firstResultTime == 0: - item.firstResultTime = time.time() - self.operationCondition.notify() - - - def ResolutionThread(self): - while True: - with self.operationCondition: - self.operationCondition.wait(self.ComputeNextEventTimeoutSeconds()) - - updatedDiscoveries = [] +class _PendingDiscoveries: + """Manages a list of pending discoveries and associated callbacks.""" + + activeDiscoveries: List[PendingDiscovery] = [] + + def __init__(self): + self.operationCondition = threading.Condition() + self.resolution = threading.Thread( + target=self.ResolutionThread, daemon=True) + self.resolution.start() + + def Start(self, peerId: PeerId, callback: Callable[[AggregatedDiscoveryResults], None], timeoutMs: int): + """Add a new pending result item to the internal list.""" + with self.operationCondition: + self.activeDiscoveries.append( + PendingDiscovery( + AggregatedDiscoveryResults(peerId, addresses=set()), + callback=callback, + expireTime=time.time() + timeoutMs/1000.0, + firstResultTime=0, + )) + self.operationCondition.notify() + + def OnSuccess(self, peerId: PeerId, address: NodeAddress): + """Notify of a succesful address resolution.""" + with self.operationCondition: + for item in self.activeDiscoveries: + if item.result.peerId != peerId: + continue + + item.result.addresses.add(address) + if item.firstResultTime == 0: + item.firstResultTime = time.time() + self.operationCondition.notify() + + def ResolutionThread(self): + while True: + with self.operationCondition: + self.operationCondition.wait( + self.ComputeNextEventTimeoutSeconds()) + + updatedDiscoveries = [] + + for item in self.activeDiscoveries: + if self.NeedsCallback(item): + try: + item.callback(item.result) + except: + logging.exception("Node discovery callback failed") + else: + updatedDiscoveries.append(item) + + self.activeDiscoveries = updatedDiscoveries + + def NeedsCallback(self, item: PendingDiscovery): + """Find out if the callback needs to be called for the given item.""" + now = time.time() + if item.expireTime <= now: + return True + + if (item.firstResultTime > 0) and (item.firstResultTime + _RESULT_WAIT_TIME_SEC <= now): + return True + + return False + + def ComputeNextEventTimeoutSeconds(self): + """Compute how much a thread needs to sleep based on the active discoveries list.""" + sleepTimeSec = 10.0 # just in case + + now = time.time() for item in self.activeDiscoveries: - if self.NeedsCallback(item): - try: - item.callback(item.result) - except: - logging.exception("Node discovery callback failed") - else: - updatedDiscoveries.append(item) - - self.activeDiscoveries = updatedDiscoveries - - def NeedsCallback(self, item: PendingDiscovery): - """Find out if the callback needs to be called for the given item.""" - now = time.time() - if item.expireTime <= now: - return True - - if (item.firstResultTime > 0) and (item.firstResultTime + _RESULT_WAIT_TIME_SEC <= now): - return True - - return False - - def ComputeNextEventTimeoutSeconds(self): - """Compute how much a thread needs to sleep based on the active discoveries list.""" - sleepTimeSec = 10.0 # just in case - - now = time.time() - for item in self.activeDiscoveries: - # figure out expiry time for an item - expireSleep = item.expireTime - now - if expireSleep < sleepTimeSec: - sleepTimeSec = expireSleep - - # Allow only a short time window for 'additional results' once we - # have one - resultSleep = (item.firstResultTime + _RESULT_WAIT_TIME_SEC) - now - if resultSleep < sleepTimeSec: - sleepTimeSec = resultSleep 
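
ResolutionThread above is a standard condition-variable loop: sleep until notified of new work or until the next computed deadline passes. Its skeleton, with compute_timeout() standing in for ComputeNextEventTimeoutSeconds():

import threading
import time

cond = threading.Condition()
deadline = time.time() + 0.2

def compute_timeout():
    # Mirror the guard above: never return <= 0, or wait() spins hot.
    return max(deadline - time.time(), 0.001)

with cond:
    notified = cond.wait(compute_timeout())  # False means the timeout fired
print("notified" if notified else "timed out")
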
- - - # in case our processing missed some expire times, set a very short - # sleep - if sleepTimeSec <= 0: - sleepTimeSec = 0.001 - - return sleepTimeSec + # figure out expiry time for an item + expireSleep = item.expireTime - now + if expireSleep < sleepTimeSec: + sleepTimeSec = expireSleep + + # Allow only a short time window for 'additional results' once we + # have one + resultSleep = (item.firstResultTime + _RESULT_WAIT_TIME_SEC) - now + if resultSleep < sleepTimeSec: + sleepTimeSec = resultSleep + + # in case our processing missed some expire times, set a very short + # sleep + if sleepTimeSec <= 0: + sleepTimeSec = 0.001 + + return sleepTimeSec # define firstResultTime @@ -155,18 +160,18 @@ def ComputeNextEventTimeoutSeconds(self): @DiscoverSuccessCallback_t def _DiscoverSuccess(fabric: int, node: int, interface: int, ip: str, port: int): - peerId = PeerId(fabric, node) - address = NodeAddress(interface, ip, port) + peerId = PeerId(fabric, node) + address = NodeAddress(interface, ip, port) - global _gPendingDiscoveries - _gPendingDiscoveries.OnSuccess(peerId, address) + global _gPendingDiscoveries + _gPendingDiscoveries.OnSuccess(peerId, address) @DiscoverFailureCallback_t def _DiscoverFailure(fabric: int, node: int, errorCode: int): - # Many discovery errors currently do not include a useful node/fabric id - # hence we just log and rely on discovery timeouts to return 'no data' - logging.error("Discovery failure, error %d", errorCode) + # Many discovery errors currently do not include a useful node/fabric id + # hence we just log and rely on discovery timeouts to return 'no data' + logging.error("Discovery failure, error %d", errorCode) def FindAddressAsync(fabricid: int, nodeid: int, callback, timeout_ms=1000): @@ -178,36 +183,39 @@ def FindAddressAsync(fabricid: int, nodeid: int, callback, timeout_ms=1000): callback: Will be called once node resolution completes. 
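
NeedsCallback above fires on whichever comes first: the overall discovery deadline, or a short settle window (_RESULT_WAIT_TIME_SEC) after the first result arrived. The predicate is pure and easy to sanity-check in isolation:

_RESULT_WAIT_TIME_SEC = 0.05

def needs_callback(now, expire_time, first_result_time):
    if expire_time <= now:
        return True  # overall discovery timeout reached
    # 0 means "no result yet"; otherwise allow a short settle window.
    return first_result_time > 0 and first_result_time + _RESULT_WAIT_TIME_SEC <= now

assert needs_callback(now=10.0, expire_time=9.9, first_result_time=0.0)
assert needs_callback(now=10.0, expire_time=20.0, first_result_time=9.90)
assert not needs_callback(now=10.0, expire_time=20.0, first_result_time=9.99)
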
""" - _GetDiscoveryLibraryHandle().pychip_discovery_set_callbacks(_DiscoverSuccess, _DiscoverFailure) + _GetDiscoveryLibraryHandle().pychip_discovery_set_callbacks( + _DiscoverSuccess, _DiscoverFailure) global _gPendingDiscoveries _gPendingDiscoveries.Start( - PeerId(fabricid, nodeid), - callback, - timeout_ms + PeerId(fabricid, nodeid), + callback, + timeout_ms ) res = _GetDiscoveryLibraryHandle().pychip_discovery_resolve(fabricid, nodeid) if res != 0: raise Exception("Failed to start node resolution") + class _SyncAddressFinder: - """Performs a blocking wait for an address resolution operation.""" - def __init__(self): - self.semaphore = threading.Semaphore(value=0) - self.result = None + """Performs a blocking wait for an address resolution operation.""" + + def __init__(self): + self.semaphore = threading.Semaphore(value=0) + self.result = None - def Callback(self, data): - self.result = data - self.semaphore.release() + def Callback(self, data): + self.result = data + self.semaphore.release() - def WaitForResult(self): - self.semaphore.acquire() - return self.result + def WaitForResult(self): + self.semaphore.acquire() + return self.result def FindAddress(fabricid, nodeid, timeout_ms=1000): - """Performs an address discovery for a node and returns the result.""" - finder = _SyncAddressFinder() - FindAddressAsync(fabricid, nodeid, finder.Callback, timeout_ms) - return finder.WaitForResult() + """Performs an address discovery for a node and returns the result.""" + finder = _SyncAddressFinder() + FindAddressAsync(fabricid, nodeid, finder.Callback, timeout_ms) + return finder.WaitForResult() diff --git a/src/controller/python/chip/discovery/library_handle.py b/src/controller/python/chip/discovery/library_handle.py index bf6ddb39b165b2..c4eefbd2fee55b 100644 --- a/src/controller/python/chip/discovery/library_handle.py +++ b/src/controller/python/chip/discovery/library_handle.py @@ -19,21 +19,23 @@ from ctypes import c_void_p, c_int32, c_uint32, c_uint64 from chip.discovery.types import DiscoverSuccessCallback_t, DiscoverFailureCallback_t + def _GetDiscoveryLibraryHandle() -> ctypes.CDLL: - """ Get the native library handle with discovery methods initialized. + """ Get the native library handle with discovery methods initialized. - Retreives the CHIP native library handle and attaches signatures to - native methods. - """ + Retreives the CHIP native library handle and attaches signatures to + native methods. + """ - handle = chip.native.GetLibraryHandle() + handle = chip.native.GetLibraryHandle() - # Uses one of the type decorators as an indicator for everything being - # initialized. - if not handle.pychip_discovery_resolve.argtypes: - setter = chip.native.NativeLibraryHandleMethodArguments(handle) + # Uses one of the type decorators as an indicator for everything being + # initialized. 
+ if not handle.pychip_discovery_resolve.argtypes: + setter = chip.native.NativeLibraryHandleMethodArguments(handle) - setter.Set('pychip_discovery_resolve', c_uint32, [c_uint64, c_uint64]) - setter.Set('pychip_discovery_set_callbacks', None, [DiscoverSuccessCallback_t, DiscoverFailureCallback_t]) + setter.Set('pychip_discovery_resolve', c_uint32, [c_uint64, c_uint64]) + setter.Set('pychip_discovery_set_callbacks', None, [ + DiscoverSuccessCallback_t, DiscoverFailureCallback_t]) - return handle + return handle diff --git a/src/controller/python/chip/discovery/types.py b/src/controller/python/chip/discovery/types.py index fc433cd5ec9ff4..775c2420559bbf 100644 --- a/src/controller/python/chip/discovery/types.py +++ b/src/controller/python/chip/discovery/types.py @@ -19,16 +19,16 @@ DiscoverSuccessCallback_t = CFUNCTYPE( None, # void return - c_uint64, # fabric id - c_uint64, # node id - c_uint32, # interface id - c_char_p, # IP address + c_uint64, # fabric id + c_uint64, # node id + c_uint32, # interface id + c_char_p, # IP address c_uint16 # port ) DiscoverFailureCallback_t = CFUNCTYPE( None, # void return - c_uint64, # fabric id - c_uint64, # node id - c_uint32, # CHIP_ERROR error code + c_uint64, # fabric id + c_uint64, # node id + c_uint32, # CHIP_ERROR error code ) diff --git a/src/controller/python/chip/interaction_model/__init__.py b/src/controller/python/chip/interaction_model/__init__.py index 76b26ecc371ce7..881d2e772380e5 100644 --- a/src/controller/python/chip/interaction_model/__init__.py +++ b/src/controller/python/chip/interaction_model/__init__.py @@ -22,4 +22,4 @@ """Provides Python APIs for CHIP.""" -__all__ = [ "IMDelegate" ] +__all__ = ["IMDelegate"] diff --git a/src/controller/python/chip/interaction_model/delegate.py b/src/controller/python/chip/interaction_model/delegate.py index e66f8c53065002..ce3b86524319a6 100644 --- a/src/controller/python/chip/interaction_model/delegate.py +++ b/src/controller/python/chip/interaction_model/delegate.py @@ -27,7 +27,7 @@ # The type should match CommandStatus in interaction_model/Delegate.h # CommandStatus should not contain padding IMCommandStatus = Struct( - "ProtocolId" / Int32ul, + "ProtocolId" / Int32ul, "ProtocolCode" / Int16ul, "EndpointId" / Int16ul, "ClusterId" / Int32ul, @@ -50,12 +50,14 @@ class AttributePath: clusterId: int attributeId: int + @dataclass class AttributeReadResult: path: AttributePath status: int value: 'typing.Any' + # typedef void (*PythonInteractionModelDelegate_OnCommandResponseStatusCodeReceivedFunct)(uint64_t commandSenderPtr, # void * commandStatusBuf); # typedef void (*PythonInteractionModelDelegate_OnCommandResponseProtocolErrorFunct)(uint64_t commandSenderPtr, uint8_t commandIndex); @@ -63,10 +65,12 @@ class AttributeReadResult: # typedef void (*PythonInteractionModelDelegate_OnReportDataFunct)(chip::NodeId nodeId, uint64_t readClientAppIdentifier, # void * attributePathBuf, size_t attributePathBufLen, # uint8_t * readTlvData, size_t readTlvDataLen, uint16_t statusCode); -_OnCommandResponseStatusCodeReceivedFunct = CFUNCTYPE(None, c_uint64, c_void_p, c_uint32) +_OnCommandResponseStatusCodeReceivedFunct = CFUNCTYPE( + None, c_uint64, c_void_p, c_uint32) _OnCommandResponseProtocolErrorFunct = CFUNCTYPE(None, c_uint64, c_uint8) _OnCommandResponseFunct = CFUNCTYPE(None, c_uint64, c_uint32) -_OnReportDataFunct = CFUNCTYPE(None, c_uint64, c_ssize_t, c_void_p, c_uint32, c_void_p, c_uint32, c_uint16) +_OnReportDataFunct = CFUNCTYPE( + None, c_uint64, c_ssize_t, c_void_p, c_uint32, c_void_p, c_uint32, 
c_uint16) _commandStatusDict = dict() _commandIndexStatusDict = dict() @@ -80,36 +84,46 @@ class AttributeReadResult: PLACEHOLDER_COMMAND_HANDLE = 1 DEFAULT_ATTRIBUTEREAD_APPID = 0 + def _GetCommandStatus(commandHandle: int): with _commandStatusLock: return _commandStatusDict.get(commandHandle, None) + def _GetCommandIndexStatus(commandHandle: int, commandIndex: int): with _commandStatusLock: indexDict = _commandIndexStatusDict.get(commandHandle, {}) return indexDict.get(commandIndex, None) + def _SetCommandStatus(commandHandle: int, val): with _commandStatusLock: _commandStatusDict[commandHandle] = val _commandStatusCV.notify_all() + def _SetCommandIndexStatus(commandHandle: int, commandIndex: int, status): with _commandStatusLock: - print("SetCommandIndexStatus commandHandle={} commandIndex={}".format(commandHandle, commandIndex)) + print("SetCommandIndexStatus commandHandle={} commandIndex={}".format( + commandHandle, commandIndex)) indexDict = _commandIndexStatusDict.get(commandHandle, {}) indexDict[commandIndex] = status _commandIndexStatusDict[commandHandle] = indexDict + @_OnCommandResponseStatusCodeReceivedFunct def _OnCommandResponseStatusCodeReceived(commandHandle: int, IMCommandStatusBuf, IMCommandStatusBufLen): - status = IMCommandStatus.parse(ctypes.string_at(IMCommandStatusBuf, IMCommandStatusBufLen)) - _SetCommandIndexStatus(PLACEHOLDER_COMMAND_HANDLE, status["CommandIndex"], status) + status = IMCommandStatus.parse(ctypes.string_at( + IMCommandStatusBuf, IMCommandStatusBufLen)) + _SetCommandIndexStatus(PLACEHOLDER_COMMAND_HANDLE, + status["CommandIndex"], status) + @_OnCommandResponseProtocolErrorFunct def _OnCommandResponseProtocolError(commandHandle: int, errorcode: int): pass + @_OnCommandResponseFunct def _OnCommandResponse(commandHandle: int, errorcode: int): _SetCommandStatus(PLACEHOLDER_COMMAND_HANDLE, errorcode) @@ -135,20 +149,31 @@ def _OnReportData(nodeId: int, appId: int, attrPathBuf, attrPathBufLen: int, tlv _attributeDict[appId] = AttributeReadResult( path, statusCode, tlvData) + def InitIMDelegate(): handle = chip.native.GetLibraryHandle() if not handle.pychip_InteractionModelDelegate_SetCommandResponseStatusCallback.argtypes: setter = chip.native.NativeLibraryHandleMethodArguments(handle) - setter.Set("pychip_InteractionModelDelegate_SetCommandResponseStatusCallback", None, [_OnCommandResponseStatusCodeReceivedFunct]) - setter.Set("pychip_InteractionModelDelegate_SetCommandResponseProtocolErrorCallback", None, [_OnCommandResponseProtocolErrorFunct]) - setter.Set("pychip_InteractionModelDelegate_SetCommandResponseErrorCallback", None, [_OnCommandResponseFunct]) - setter.Set("pychip_InteractionModel_GetCommandSenderHandle", c_uint32, [ctypes.POINTER(c_uint64)]) - setter.Set("pychip_InteractionModelDelegate_SetOnReportDataCallback", None, [_OnReportDataFunct]) + setter.Set("pychip_InteractionModelDelegate_SetCommandResponseStatusCallback", None, [ + _OnCommandResponseStatusCodeReceivedFunct]) + setter.Set("pychip_InteractionModelDelegate_SetCommandResponseProtocolErrorCallback", None, [ + _OnCommandResponseProtocolErrorFunct]) + setter.Set("pychip_InteractionModelDelegate_SetCommandResponseErrorCallback", None, [ + _OnCommandResponseFunct]) + setter.Set("pychip_InteractionModel_GetCommandSenderHandle", + c_uint32, [ctypes.POINTER(c_uint64)]) + setter.Set("pychip_InteractionModelDelegate_SetOnReportDataCallback", None, [ + _OnReportDataFunct]) + + handle.pychip_InteractionModelDelegate_SetCommandResponseStatusCallback( + _OnCommandResponseStatusCodeReceived) + 
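
_OnCommandResponseStatusCodeReceived above copies the native buffer with ctypes.string_at() and hands it to the declarative `construct` parser (IMCommandStatus). A round-trip sketch with a trimmed struct, assuming the construct package is installed; the field values are made up:

from construct import Struct, Int32ul, Int16ul, Int8ul

Status = Struct(
    "ProtocolId" / Int32ul,
    "ProtocolCode" / Int16ul,
    "EndpointId" / Int16ul,
    "CommandIndex" / Int8ul,
)

raw = Status.build(dict(ProtocolId=1, ProtocolCode=0, EndpointId=5, CommandIndex=2))
parsed = Status.parse(raw)   # what the callback does with the string_at() bytes
assert parsed.CommandIndex == 2
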
handle.pychip_InteractionModelDelegate_SetCommandResponseProtocolErrorCallback( + _OnCommandResponseProtocolError) + handle.pychip_InteractionModelDelegate_SetCommandResponseErrorCallback( + _OnCommandResponse) + handle.pychip_InteractionModelDelegate_SetOnReportDataCallback( + _OnReportData) - handle.pychip_InteractionModelDelegate_SetCommandResponseStatusCallback(_OnCommandResponseStatusCodeReceived) - handle.pychip_InteractionModelDelegate_SetCommandResponseProtocolErrorCallback(_OnCommandResponseProtocolError) - handle.pychip_InteractionModelDelegate_SetCommandResponseErrorCallback(_OnCommandResponse) - handle.pychip_InteractionModelDelegate_SetOnReportDataCallback(_OnReportData) def ClearCommandStatus(commandHandle: int): """ @@ -158,6 +183,7 @@ def ClearCommandStatus(commandHandle: int): _SetCommandStatus(commandHandle, None) _commandIndexStatusDict[commandHandle] = {} + def WaitCommandStatus(commandHandle: int): """ Wait for response from device, returns error code. @@ -175,6 +201,7 @@ def WaitCommandStatus(commandHandle: int): ret = _GetCommandStatus(commandHandle) return ret + def WaitCommandIndexStatus(commandHandle: int, commandIndex: int): """ Wait for response of particular command from device, returns error code and struct of response info. @@ -189,15 +216,18 @@ def WaitCommandIndexStatus(commandHandle: int, commandIndex: int): err = WaitCommandStatus(commandHandle) return (err, _GetCommandIndexStatus(commandHandle, commandIndex)) -def GetCommandSenderHandle()->int: + +def GetCommandSenderHandle() -> int: handle = chip.native.GetLibraryHandle() resPointer = c_uint64() - res = handle.pychip_InteractionModel_GetCommandSenderHandle(ctypes.pointer(resPointer)) + res = handle.pychip_InteractionModel_GetCommandSenderHandle( + ctypes.pointer(resPointer)) if res != 0: raise chip.exceptions.ChipStackError(res) ClearCommandStatus(resPointer.value) return resPointer.value + def GetAttributeReadResponse(appId: int) -> AttributeReadResult: with _attributeDictLock: return _attributeDict.get(appId, None) diff --git a/src/controller/python/chip/internal/__init__.py b/src/controller/python/chip/internal/__init__.py index d8e768686b02fd..9e77c7fe139dc7 100644 --- a/src/controller/python/chip/internal/__init__.py +++ b/src/controller/python/chip/internal/__init__.py @@ -15,7 +15,7 @@ # # Classes and methods in this module are generally to be used internally -# by the BLE constructs and are not meant as a public API. +# by the BLE constructs and are not meant as a public API. from chip.internal.commissioner import GetCommissioner diff --git a/src/controller/python/chip/internal/commissioner.py b/src/controller/python/chip/internal/commissioner.py index 07b004bfe0278c..81450329347de8 100644 --- a/src/controller/python/chip/internal/commissioner.py +++ b/src/controller/python/chip/internal/commissioner.py @@ -23,27 +23,35 @@ # Not using c_void_p directly is IMPORTANT. Python auto-casts c_void_p # to intergers and this can cause 32/64 bit issues. 
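
The warning in that comment is why Commissioner_p and ThreadBlob_p below subclass c_void_p instead of using it directly: ctypes auto-converts a plain c_void_p function result into a Python int, while a subclass instance is returned unchanged. Demonstrated against libc on a POSIX system:

import ctypes
import ctypes.util

libc = ctypes.CDLL(ctypes.util.find_library("c"))

class Ptr(ctypes.c_void_p):
    pass

libc.malloc.argtypes = [ctypes.c_size_t]
libc.free.argtypes = [ctypes.c_void_p]

libc.malloc.restype = ctypes.c_void_p
p = libc.malloc(16)
assert isinstance(p, int)   # plain c_void_p results are auto-cast to int

libc.malloc.restype = Ptr
q = libc.malloc(16)
assert isinstance(q, Ptr)   # a subclass keeps its pointer type

libc.free(ctypes.c_void_p(p))
libc.free(q)
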
+ + class Commissioner_p(ctypes.c_void_p): pass + class ThreadBlob_p(ctypes.c_void_p): pass + @NetworkCredentialsRequested def OnNetworkCredentialsRequested(): GetCommissioner()._OnNetworkCredentialsRequested() + @OperationalCredentialsRequested def OnOperationalCredentialsRequested(csr, csr_length): - GetCommissioner()._OnOperationalCredentialsRequested(ctypes.string_at(csr, csr_length)) + GetCommissioner()._OnOperationalCredentialsRequested( + ctypes.string_at(csr, csr_length)) + @PairingComplete def OnPairingComplete(err: int): GetCommissioner()._OnPairingComplete(err) + class PairingState(Enum): """States throughout a pairing flow. - + Devices generally go through: initialized -> pairing -> netcreds -> opcreds -> done (initialized) @@ -57,12 +65,12 @@ class PairingState(Enum): class Commissioner: """Commissioner wraps the DeviceCommissioner native class. - + The commissioner is a DeviceController that supports pairing. Since the device controller supports multiple devices, this class is expected to be used as a singleton - + """ def __init__(self, handle: ctypes.CDLL, native: Commissioner_p): @@ -71,23 +79,24 @@ def __init__(self, handle: ctypes.CDLL, native: Commissioner_p): self.pairing_state = PairingState.INITIALIZED self.on_pairing_complete = None - def BlePair(self, remoteDeviceId: int, pinCode: int, discriminator: int): - result = self._handle.pychip_internal_Commissioner_BleConnectForPairing(self._native, remoteDeviceId, pinCode, discriminator) - if result != 0: + result = self._handle.pychip_internal_Commissioner_BleConnectForPairing( + self._native, remoteDeviceId, pinCode, discriminator) + if result != 0: raise Exception("Failed to pair. CHIP Error code %d" % result) self.pairing_state = PairingState.PAIRING - + def Unpair(self, remoteDeviceId: int): - result = self._handle.pychip_internal_Commissioner_Unpair(self._native, remoteDeviceId) - if result != 0: + result = self._handle.pychip_internal_Commissioner_Unpair( + self._native, remoteDeviceId) + if result != 0: raise Exception("Failed to unpair. CHIP Error code %d" % result) def _OnPairingComplete(self, err: int): self.pairing_state = PairingState.INITIALIZED if self.on_pairing_complete: - self.on_pairing_complete(err) + self.on_pairing_complete(err) def _SetNativeCallSignatues(handle: ctypes.CDLL): @@ -95,13 +104,18 @@ def _SetNativeCallSignatues(handle: ctypes.CDLL): setter = NativeLibraryHandleMethodArguments(handle) setter.Set('pychip_internal_Commissioner_New', Commissioner_p, [c_uint64]) - setter.Set('pychip_internal_Commissioner_Unpair', c_uint32, [Commissioner_p, c_uint64]) - setter.Set('pychip_internal_Commissioner_BleConnectForPairing', c_uint32, [Commissioner_p, c_uint64, c_uint32, c_uint16]) + setter.Set('pychip_internal_Commissioner_Unpair', + c_uint32, [Commissioner_p, c_uint64]) + setter.Set('pychip_internal_Commissioner_BleConnectForPairing', + c_uint32, [Commissioner_p, c_uint64, c_uint32, c_uint16]) + + setter.Set('pychip_internal_PairingDelegate_SetPairingCompleteCallback', None, [ + PairingComplete]) - setter.Set('pychip_internal_PairingDelegate_SetPairingCompleteCallback', None, [PairingComplete]) commissionerSingleton: Optional[Commissioner] = None + def GetCommissioner() -> Commissioner: """Gets a reference to the global commissioner singleton. 
@@ -116,11 +130,11 @@ def GetCommissioner() -> Commissioner: native = handle.pychip_internal_Commissioner_New(GetLocalNodeId()) if not native: - raise Exception('Failed to create commissioner object.') + raise Exception('Failed to create commissioner object.') - handle.pychip_internal_PairingDelegate_SetPairingCompleteCallback(OnPairingComplete) + handle.pychip_internal_PairingDelegate_SetPairingCompleteCallback( + OnPairingComplete) commissionerSingleton = Commissioner(handle, native) - return commissionerSingleton diff --git a/src/controller/python/chip/internal/thread.py b/src/controller/python/chip/internal/thread.py index 4e7d84657cf375..e1af2b0a69ea6b 100644 --- a/src/controller/python/chip/internal/thread.py +++ b/src/controller/python/chip/internal/thread.py @@ -30,4 +30,3 @@ "NetworkName" / PaddedString(17, 'utf8'), "Channel" / Byte, ) - diff --git a/src/controller/python/chip/logging/library_handle.py b/src/controller/python/chip/logging/library_handle.py index f642b9b65897cb..5dadeb2cbe4f2b 100644 --- a/src/controller/python/chip/logging/library_handle.py +++ b/src/controller/python/chip/logging/library_handle.py @@ -21,19 +21,20 @@ def _GetLoggingLibraryHandle() -> ctypes.CDLL: - """ Get the native library handle with logging method initialization. + """ Get the native library handle with logging method initialization. - Retreives the CHIP native library handle and attaches signatures to - native methods. - """ + Retreives the CHIP native library handle and attaches signatures to + native methods. + """ - handle = chip.native.GetLibraryHandle() + handle = chip.native.GetLibraryHandle() - # Uses one of the type decorators as an indicator for everything being - # initialized. - if not handle.pychip_logging_set_callback.argtypes: - setter = chip.native.NativeLibraryHandleMethodArguments(handle) + # Uses one of the type decorators as an indicator for everything being + # initialized. + if not handle.pychip_logging_set_callback.argtypes: + setter = chip.native.NativeLibraryHandleMethodArguments(handle) - setter.Set('pychip_logging_set_callback', c_void_p, [LogRedirectCallback_t]) + setter.Set('pychip_logging_set_callback', + c_void_p, [LogRedirectCallback_t]) - return handle + return handle diff --git a/src/controller/python/chip/native/__init__.py b/src/controller/python/chip/native/__init__.py index 4c3c6fa99355fb..19ad4466ef8220 100644 --- a/src/controller/python/chip/native/__init__.py +++ b/src/controller/python/chip/native/__init__.py @@ -7,79 +7,79 @@ def _AllDirsToRoot(dir): - """Return all parent paths of a directory.""" - dir = os.path.abspath(dir) - while True: - yield dir - parent = os.path.dirname(dir) - if parent == "" or parent == dir: - break - dir = parent + """Return all parent paths of a directory.""" + dir = os.path.abspath(dir) + while True: + yield dir + parent = os.path.dirname(dir) + if parent == "" or parent == dir: + break + dir = parent def FindNativeLibraryPath() -> str: - """Find the native CHIP dll/so path.""" - - scriptDir = os.path.dirname(os.path.abspath(__file__)) - - # When properly installed in the chip package, the Chip Device Manager DLL will - # be located in the package root directory, along side the package's - # modules. 
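
FindNativeLibraryPath above falls back to walking every ancestor of the script directory through the _AllDirsToRoot generator; it is small enough to verify directly (POSIX paths assumed):

import os

def all_dirs_to_root(d):
    d = os.path.abspath(d)
    while True:
        yield d
        parent = os.path.dirname(d)
        if parent == "" or parent == d:  # reached the filesystem root
            break
        d = parent

assert list(all_dirs_to_root("/a/b")) == ["/a/b", "/a", "/"]
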
- dmDLLPath = os.path.join( - os.path.dirname(scriptDir), # file should be inside 'chip' - NATIVE_LIBRARY_BASE_NAME) - if os.path.exists(dmDLLPath): - return dmDLLPath - - # For the convenience of developers, search the list of parent paths relative to the - # running script looking for an CHIP build directory containing the Chip Device - # Manager DLL. This makes it possible to import and use the ChipDeviceMgr module - # directly from a built copy of the CHIP source tree. - buildMachineGlob = "%s-*-%s*" % (platform.machine(), - platform.system().lower()) - relDMDLLPathGlob = os.path.join( - "build", - buildMachineGlob, - "src/controller/python/.libs", - NATIVE_LIBRARY_BASE_NAME, - ) - for dir in _AllDirsToRoot(scriptDir): - dmDLLPathGlob = os.path.join(dir, relDMDLLPathGlob) - for dmDLLPath in glob.glob(dmDLLPathGlob): - if os.path.exists(dmDLLPath): + """Find the native CHIP dll/so path.""" + + scriptDir = os.path.dirname(os.path.abspath(__file__)) + + # When properly installed in the chip package, the Chip Device Manager DLL will + # be located in the package root directory, along side the package's + # modules. + dmDLLPath = os.path.join( + os.path.dirname(scriptDir), # file should be inside 'chip' + NATIVE_LIBRARY_BASE_NAME) + if os.path.exists(dmDLLPath): return dmDLLPath - raise Exception( - "Unable to locate Chip Device Manager DLL (%s); expected location: %s" % - (NATIVE_LIBRARY_BASE_NAME, scriptDir)) + # For the convenience of developers, search the list of parent paths relative to the + # running script looking for an CHIP build directory containing the Chip Device + # Manager DLL. This makes it possible to import and use the ChipDeviceMgr module + # directly from a built copy of the CHIP source tree. + buildMachineGlob = "%s-*-%s*" % (platform.machine(), + platform.system().lower()) + relDMDLLPathGlob = os.path.join( + "build", + buildMachineGlob, + "src/controller/python/.libs", + NATIVE_LIBRARY_BASE_NAME, + ) + for dir in _AllDirsToRoot(scriptDir): + dmDLLPathGlob = os.path.join(dir, relDMDLLPathGlob) + for dmDLLPath in glob.glob(dmDLLPathGlob): + if os.path.exists(dmDLLPath): + return dmDLLPath + + raise Exception( + "Unable to locate Chip Device Manager DLL (%s); expected location: %s" % + (NATIVE_LIBRARY_BASE_NAME, scriptDir)) class NativeLibraryHandleMethodArguments: - """Convenience wrapper to set native method argtype and restype for methods.""" + """Convenience wrapper to set native method argtype and restype for methods.""" - def __init__(self, handle): - self.handle = handle + def __init__(self, handle): + self.handle = handle - def Set(self, methodName: str, resultType, argumentTypes: list): - method = getattr(self.handle, methodName) - method.restype = resultType - method.argtype = argumentTypes + def Set(self, methodName: str, resultType, argumentTypes: list): + method = getattr(self.handle, methodName) + method.restype = resultType + method.argtype = argumentTypes _nativeLibraryHandle: ctypes.CDLL = None def GetLibraryHandle() -> ctypes.CDLL: - """Get a memoized handle to the chip native code dll.""" + """Get a memoized handle to the chip native code dll.""" - global _nativeLibraryHandle - if _nativeLibraryHandle is None: - _nativeLibraryHandle = ctypes.CDLL(FindNativeLibraryPath()) + global _nativeLibraryHandle + if _nativeLibraryHandle is None: + _nativeLibraryHandle = ctypes.CDLL(FindNativeLibraryPath()) - setter = NativeLibraryHandleMethodArguments(_nativeLibraryHandle) + setter = NativeLibraryHandleMethodArguments(_nativeLibraryHandle) - 
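
One thing worth flagging rather than just restyling: ctypes reads the plural `argtypes` attribute, so the `method.argtype = argumentTypes` assignment inside Set() above stores a name ctypes never consults, and the argument signatures are most likely never installed (only the restype is). If that is a real bug rather than intended, the fix is one letter:

class NativeLibraryHandleMethodArguments:
    def __init__(self, handle):
        self.handle = handle

    def Set(self, methodName: str, resultType, argumentTypes: list):
        method = getattr(self.handle, methodName)
        method.restype = resultType
        method.argtypes = argumentTypes  # was `method.argtype`; ctypes ignores that spelling
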
setter.Set("pychip_native_init", None, []) + setter.Set("pychip_native_init", None, []) - _nativeLibraryHandle.pychip_native_init() + _nativeLibraryHandle.pychip_native_init() - return _nativeLibraryHandle + return _nativeLibraryHandle diff --git a/src/controller/python/chip/setup_payload/setup_payload.py b/src/controller/python/chip/setup_payload/setup_payload.py index 107ab1f23f6f8c..63dad272ed0cab 100644 --- a/src/controller/python/chip/setup_payload/setup_payload.py +++ b/src/controller/python/chip/setup_payload/setup_payload.py @@ -19,6 +19,7 @@ from ctypes import CFUNCTYPE, c_char_p, c_int32, c_uint8 + class SetupPayload: # AttributeVisitor: void(const char* name, const char* value) AttributeVisitor = CFUNCTYPE(None, c_char_p, c_char_p) @@ -34,11 +35,13 @@ def __init__(self): def AddAttribute(name, value): self.attributes[name.decode()] = value.decode() + def AddVendorAttribute(tag, value): self.vendor_attributes[tag] = value.decode() self.attribute_visitor = SetupPayload.AttributeVisitor(AddAttribute) - self.vendor_attribute_visitor = SetupPayload.VendorAttributeVisitor(AddVendorAttribute) + self.vendor_attribute_visitor = SetupPayload.VendorAttributeVisitor( + AddVendorAttribute) def ParseQrCode(self, qrCode: str): self.Clear() @@ -64,7 +67,8 @@ def ParseManualPairingCode(self, manualPairingCode: str): def PrintOnboardingCodes(self, passcode, vendorId, productId, discriminator, customFlow, capabilities, version): self.Clear() - err = self.chipLib.pychip_SetupPayload_PrintOnboardingCodes(passcode, vendorId, productId, discriminator, customFlow, capabilities, version) + err = self.chipLib.pychip_SetupPayload_PrintOnboardingCodes( + passcode, vendorId, productId, discriminator, customFlow, capabilities, version) if err != 0: raise ChipStackError(err) @@ -76,7 +80,8 @@ def Print(self): print(f"{name}: {value}{decorated_value}") for tag in self.vendor_attributes: - print(f"Vendor attribute '{tag:>3}': {self.vendor_attributes[tag]}") + print( + f"Vendor attribute '{tag:>3}': {self.vendor_attributes[tag]}") def Clear(self): self.attributes.clear() @@ -85,9 +90,12 @@ def Clear(self): def __DecorateValue(self, name, value): if name == "RendezvousInformation": rendezvous_methods = [] - if int(value) & 0b001: rendezvous_methods += ["SoftAP"] - if int(value) & 0b010: rendezvous_methods += ["BLE"] - if int(value) & 0b100: rendezvous_methods += ["OnNetwork"] + if int(value) & 0b001: + rendezvous_methods += ["SoftAP"] + if int(value) & 0b010: + rendezvous_methods += ["BLE"] + if int(value) & 0b100: + rendezvous_methods += ["OnNetwork"] return ', '.join(rendezvous_methods) return None @@ -104,4 +112,4 @@ def __InitNativeFunctions(self, chipLib): [c_char_p, SetupPayload.AttributeVisitor, SetupPayload.VendorAttributeVisitor]) setter.Set("pychip_SetupPayload_PrintOnboardingCodes", c_int32, - [c_uint32, c_uint16, c_uint16, c_uint16, uint8_t, uint8_t, uint8_t]) \ No newline at end of file + [c_uint32, c_uint16, c_uint16, c_uint16, uint8_t, uint8_t, uint8_t]) diff --git a/src/controller/python/chip/tlv/__init__.py b/src/controller/python/chip/tlv/__init__.py index bf74c34534dc00..e1d4d0a3a23a0f 100644 --- a/src/controller/python/chip/tlv/__init__.py +++ b/src/controller/python/chip/tlv/__init__.py @@ -192,7 +192,8 @@ def put(self, tag, val): self.startStructure(tag) if type(val) == dict: val = OrderedDict( - sorted(val.items(), key=lambda item: tlvTagToSortKey(item[0])) + sorted(val.items(), + key=lambda item: tlvTagToSortKey(item[0])) ) for containedTag, containedVal in val.items(): 
self.put(containedTag, containedVal) @@ -320,12 +321,14 @@ def _encodeControlAndTag(self, type, tag, lenOfLenOrVal=0): and len(self._containerStack) != 0 and self._containerStack[0] == TLV_TYPE_STRUCTURE ): - raise ValueError("Attempt to encode anonymous tag within TLV structure") + raise ValueError( + "Attempt to encode anonymous tag within TLV structure") controlByte |= TLV_TAG_CONTROL_ANONYMOUS return struct.pack(" UINT8_MAX: - raise ValueError("Context-specific TLV tag number out of range") + raise ValueError( + "Context-specific TLV tag number out of range") if len(self._containerStack) == 0: raise ValueError( "Attempt to encode context-specific TLV tag at top level" @@ -420,7 +423,8 @@ def get(self): return out def _decodeControlByte(self, tlv, decoding): - (controlByte,) = struct.unpack(" 0 and endOfEncoding == False: + while len(tlv[self._bytesRead:]) > 0 and endOfEncoding == False: decoding = {} self._decodeControlAndTag(tlv, decoding) self._decodeStrLength(tlv, decoding) diff --git a/src/controller/python/test/test_scripts/base.py b/src/controller/python/test/test_scripts/base.py index 061c53301383f7..dc5a57ce9dfca5 100644 --- a/src/controller/python/test/test_scripts/base.py +++ b/src/controller/python/test/test_scripts/base.py @@ -89,7 +89,8 @@ def TestCloseSession(self, nodeid: int): self.devCtrl.CloseSession(nodeid) return True except Exception as ex: - self.logger.exception(f"Failed to close sessions with device {nodeid}: {ex}") + self.logger.exception( + f"Failed to close sessions with device {nodeid}: {ex}") return False def TestNetworkCommissioning(self, nodeid: int, endpoint: int, group: int, dataset: str, network_id: str): @@ -161,7 +162,8 @@ def TestReadBasicAttribiutes(self, nodeid: int, endpoint: int, group: int): endpoint=endpoint, groupid=group) if res is None: - raise Exception("Read {} attribute: no value get".format(basic_attr)) + raise Exception( + "Read {} attribute: no value get".format(basic_attr)) elif res.status != 0: raise Exception( "Read {} attribute: non-zero status code {}".format(basic_attr, res.status)) diff --git a/src/lib/asn1/gen_asn1oid.py b/src/lib/asn1/gen_asn1oid.py index 6bed9750768146..fd59ea996c86fb 100755 --- a/src/lib/asn1/gen_asn1oid.py +++ b/src/lib/asn1/gen_asn1oid.py @@ -31,49 +31,50 @@ import optparse import sys + def identity(n): return n # OID labels -ansi_X9_62 = identity -certicom = identity -characteristicTwo = identity -chip = identity -curve = identity -curves = identity -digest_algorithm = identity -dod = identity -ds = identity -enterprise = identity -organization = identity -internet = identity -iso = identity -itu_t = identity -joint_iso_ccitt = identity -keyType = identity -mechanisms = identity -member_body = identity -pkcs1 = identity -pkcs = identity -pkix = identity -prime = identity -private = identity -rsadsi = identity -schemes = identity -security = identity -signatures = identity -us = identity -zigbee = identity +ansi_X9_62 = identity +certicom = identity +characteristicTwo = identity +chip = identity +curve = identity +curves = identity +digest_algorithm = identity +dod = identity +ds = identity +enterprise = identity +organization = identity +internet = identity +iso = identity +itu_t = identity +joint_iso_ccitt = identity +keyType = identity +mechanisms = identity +member_body = identity +pkcs1 = identity +pkcs = identity +pkix = identity +prime = identity +private = identity +rsadsi = identity +schemes = identity +security = identity +signatures = identity +us = identity +zigbee = identity # OID Categories 
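
The wall of `identity` aliases above is purely cosmetic: every label returns its argument, which lets the OID table read like the ASN.1 registry while evaluating to plain integer lists. For instance:

def identity(n):
    return n

iso = member_body = us = ansi_X9_62 = keyType = identity

# Reads like the ASN.1 registry entry, evaluates to the raw arc numbers:
assert [iso(1), member_body(2), us(840), ansi_X9_62(10045), keyType(2), 1] == [
    1, 2, 840, 10045, 2, 1]
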
oidCategories = [ - ( "PubKeyAlgo", 0x0100 ), - ( "SigAlgo", 0x0200 ), - ( "AttributeType", 0x0300 ), - ( "EllipticCurve", 0x0400 ), - ( "Extension", 0x0500 ), - ( "KeyPurpose", 0x0600 ) + ("PubKeyAlgo", 0x0100), + ("SigAlgo", 0x0200), + ("AttributeType", 0x0300), + ("EllipticCurve", 0x0400), + ("Extension", 0x0500), + ("KeyPurpose", 0x0600) ] # Table of well-known ASN.1 object IDs @@ -91,55 +92,92 @@ def identity(n): # ----------------- -------------------------- -------- ------------------------------------------------------------------------------------------------ # Public Key Algorithms - ( "PubKeyAlgo", "ECPublicKey", 1, [ iso(1), member_body(2), us(840), ansi_X9_62(10045), keyType(2), 1 ] ), + ("PubKeyAlgo", "ECPublicKey", 1, [ + iso(1), member_body(2), us(840), ansi_X9_62(10045), keyType(2), 1]), # Signature Algorithms # RFC 3279 - ( "SigAlgo", "ECDSAWithSHA256", 1, [ iso(1), member_body(2), us(840), ansi_X9_62(10045), signatures(4), 3, 2 ] ), + ("SigAlgo", "ECDSAWithSHA256", 1, [ + iso(1), member_body(2), us(840), ansi_X9_62(10045), signatures(4), 3, 2]), # X.509 Distinguished Name Attribute Types # WARNING -- Assign no values higher than 127. - ( "AttributeType", "CommonName", 1, [ joint_iso_ccitt(2), ds(5), 4, 3 ] ), - ( "AttributeType", "Surname", 2, [ joint_iso_ccitt(2), ds(5), 4, 4 ] ), - ( "AttributeType", "SerialNumber", 3, [ joint_iso_ccitt(2), ds(5), 4, 5 ] ), - ( "AttributeType", "CountryName", 4, [ joint_iso_ccitt(2), ds(5), 4, 6 ] ), - ( "AttributeType", "LocalityName", 5, [ joint_iso_ccitt(2), ds(5), 4, 7 ] ), - ( "AttributeType", "StateOrProvinceName", 6, [ joint_iso_ccitt(2), ds(5), 4, 8 ] ), - ( "AttributeType", "OrganizationName", 7, [ joint_iso_ccitt(2), ds(5), 4, 10 ] ), - ( "AttributeType", "OrganizationalUnitName", 8, [ joint_iso_ccitt(2), ds(5), 4, 11 ] ), - ( "AttributeType", "Title", 9, [ joint_iso_ccitt(2), ds(5), 4, 12 ] ), - ( "AttributeType", "Name", 10, [ joint_iso_ccitt(2), ds(5), 4, 41 ] ), - ( "AttributeType", "GivenName", 11, [ joint_iso_ccitt(2), ds(5), 4, 42 ] ), - ( "AttributeType", "Initials", 12, [ joint_iso_ccitt(2), ds(5), 4, 43 ] ), - ( "AttributeType", "GenerationQualifier", 13, [ joint_iso_ccitt(2), ds(5), 4, 44 ] ), - ( "AttributeType", "DNQualifier", 14, [ joint_iso_ccitt(2), ds(5), 4, 46 ] ), - ( "AttributeType", "Pseudonym", 15, [ joint_iso_ccitt(2), ds(5), 4, 65 ] ), - ( "AttributeType", "DomainComponent", 16, [ itu_t(0), 9, 2342, 19200300, 100, 1, 25 ] ), - ( "AttributeType", "ChipNodeId", 17, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 1 ] ), - ( "AttributeType", "ChipFirmwareSigningId", 18, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 2 ] ), - ( "AttributeType", "ChipICAId", 19, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 3 ] ), - ( "AttributeType", "ChipRootId", 20, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 4 ] ), - ( "AttributeType", "ChipFabricId", 21, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 5 ] ), - ( "AttributeType", "ChipAuthTag1", 22, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 6 ] ), - ( "AttributeType", "ChipAuthTag2", 23, [ iso(1), organization(3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 7 ] ), + ("AttributeType", "CommonName", 1, + [joint_iso_ccitt(2), ds(5), 
4, 3]), + ("AttributeType", "Surname", 2, + [joint_iso_ccitt(2), ds(5), 4, 4]), + ("AttributeType", "SerialNumber", + 3, [joint_iso_ccitt(2), ds(5), 4, 5]), + ("AttributeType", "CountryName", + 4, [joint_iso_ccitt(2), ds(5), 4, 6]), + ("AttributeType", "LocalityName", + 5, [joint_iso_ccitt(2), ds(5), 4, 7]), + ("AttributeType", "StateOrProvinceName", + 6, [joint_iso_ccitt(2), ds(5), 4, 8]), + ("AttributeType", "OrganizationName", + 7, [joint_iso_ccitt(2), ds(5), 4, 10]), + ("AttributeType", "OrganizationalUnitName", + 8, [joint_iso_ccitt(2), ds(5), 4, 11]), + ("AttributeType", "Title", 9, + [joint_iso_ccitt(2), ds(5), 4, 12]), + ("AttributeType", "Name", 10, + [joint_iso_ccitt(2), ds(5), 4, 41]), + ("AttributeType", "GivenName", 11, + [joint_iso_ccitt(2), ds(5), 4, 42]), + ("AttributeType", "Initials", 12, + [joint_iso_ccitt(2), ds(5), 4, 43]), + ("AttributeType", "GenerationQualifier", + 13, [joint_iso_ccitt(2), ds(5), 4, 44]), + ("AttributeType", "DNQualifier", 14, + [joint_iso_ccitt(2), ds(5), 4, 46]), + ("AttributeType", "Pseudonym", 15, + [joint_iso_ccitt(2), ds(5), 4, 65]), + ("AttributeType", "DomainComponent", 16, + [itu_t(0), 9, 2342, 19200300, 100, 1, 25]), + ("AttributeType", "ChipNodeId", 17, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 1]), + ("AttributeType", "ChipFirmwareSigningId", 18, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 2]), + ("AttributeType", "ChipICAId", 19, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 3]), + ("AttributeType", "ChipRootId", 20, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 4]), + ("AttributeType", "ChipFabricId", 21, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 5]), + ("AttributeType", "ChipAuthTag1", 22, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 6]), + ("AttributeType", "ChipAuthTag2", 23, [iso(1), organization( + 3), dod(6), internet(1), private(4), enterprise(1), zigbee(37244), chip(1), 7]), # Elliptic Curves - ( "EllipticCurve", "prime256v1", 1, [ iso(1), member_body(2), us(840), ansi_X9_62(10045), curves(3), prime(1), 7 ] ), + ("EllipticCurve", "prime256v1", 1, [ + iso(1), member_body(2), us(840), ansi_X9_62(10045), curves(3), prime(1), 7]), # Certificate Extensions - ( "Extension", "BasicConstraints", 1, [ joint_iso_ccitt(2), ds(5), 29, 19 ] ), - ( "Extension", "KeyUsage", 2, [ joint_iso_ccitt(2), ds(5), 29, 15 ] ), - ( "Extension", "ExtendedKeyUsage", 3, [ joint_iso_ccitt(2), ds(5), 29, 37 ] ), - ( "Extension", "SubjectKeyIdentifier", 4, [ joint_iso_ccitt(2), ds(5), 29, 14 ] ), - ( "Extension", "AuthorityKeyIdentifier", 5, [ joint_iso_ccitt(2), ds(5), 29, 35 ] ), + ("Extension", "BasicConstraints", + 1, [joint_iso_ccitt(2), ds(5), 29, 19]), + ("Extension", "KeyUsage", 2, + [joint_iso_ccitt(2), ds(5), 29, 15]), + ("Extension", "ExtendedKeyUsage", + 3, [joint_iso_ccitt(2), ds(5), 29, 37]), + ("Extension", "SubjectKeyIdentifier", + 4, [joint_iso_ccitt(2), ds(5), 29, 14]), + ("Extension", "AuthorityKeyIdentifier", + 5, [joint_iso_ccitt(2), ds(5), 29, 35]), # Key Purposes - ( "KeyPurpose", "ServerAuth", 1, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 1 ] ), - ( "KeyPurpose", "ClientAuth", 2, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), 
pkix(7), 3, 2 ] ), - ( "KeyPurpose", "CodeSigning", 3, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 3 ] ), - ( "KeyPurpose", "EmailProtection", 4, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 4 ] ), - ( "KeyPurpose", "TimeStamping", 5, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 8 ] ), - ( "KeyPurpose", "OCSPSigning", 6, [ iso(1), organization(3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 9 ] ), + ("KeyPurpose", "ServerAuth", 1, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 1]), + ("KeyPurpose", "ClientAuth", 2, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 2]), + ("KeyPurpose", "CodeSigning", 3, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 3]), + ("KeyPurpose", "EmailProtection", 4, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 4]), + ("KeyPurpose", "TimeStamping", 5, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 8]), + ("KeyPurpose", "OCSPSigning", 6, [iso(1), organization( + 3), dod(6), internet(1), security(5), mechanisms(5), pkix(7), 3, 9]), ] @@ -147,12 +185,12 @@ def encodeOID(oid): assert len(oid) >= 2 - oid = [ (oid[0]*40 + oid[1]) ] + oid[2:] + oid = [(oid[0]*40 + oid[1])] + oid[2:] encodedOID = [] for val in oid: val, byte = divmod(val, 128) - seg = [ byte ] + seg = [byte] while val > 0: val, byte = divmod(val, 128) seg.insert(0, byte + 0x80) @@ -241,60 +279,66 @@ def encodeOID(oid): #endif // ASN1_DEFINE_OID_NAME_TABLE ''' -oid_category_enums ="{\n" +oid_category_enums = "{\n" for (catName, catEnum) in oidCategories: - oid_category_enums +=" kOIDCategory_%s = 0x%04X,\n" % (catName, catEnum) -oid_category_enums +=''' + oid_category_enums += " kOIDCategory_%s = 0x%04X,\n" % ( + catName, catEnum) +oid_category_enums += ''' kOIDCategory_NotSpecified = 0, kOIDCategory_Unknown = 0x0F00, kOIDCategory_Mask = 0x0F00 };''' -oid_enums ="{\n" +oid_enums = "{\n" for (catName, catEnum) in oidCategories: for (oidCatName, oidName, oidEnum, oid) in oids: if (oidCatName == catName): - oid_enums +=" kOID_%s_%s = 0x%04X,\n" % (catName, oidName, catEnum + oidEnum) - oid_enums +="\n" -oid_enums +=''' kOID_NotSpecified = 0, + oid_enums += " kOID_%s_%s = 0x%04X,\n" % ( + catName, oidName, catEnum + oidEnum) + oid_enums += "\n" +oid_enums += ''' kOID_NotSpecified = 0, kOID_Unknown = 0xFFFF, kOID_EnumMask = 0x00FF };''' -oid_utf8_strings ="\n" +oid_utf8_strings = "\n" for (catName, oidName, oidEnum, oid) in oids: - oid_utf8_strings +="static const uint8_t sOID_%s_%s[] = { %s };\n" % (catName, oidName, ", ".join([ "0x%02X" % (x) for x in encodeOID(oid) ])) + oid_utf8_strings += "static const uint8_t sOID_%s_%s[] = { %s };\n" % ( + catName, oidName, ", ".join(["0x%02X" % (x) for x in encodeOID(oid)])) -oid_table ="{\n" +oid_table = "{\n" for (catName, oidName, oidEnum, oid) in oids: - oid_table +=" { kOID_%s_%s, sOID_%s_%s, sizeof(sOID_%s_%s) },\n" % (catName, oidName, catName, oidName, catName, oidName) -oid_table +=" { kOID_NotSpecified, NULL, 0 }\n};" + oid_table += " { kOID_%s_%s, sOID_%s_%s, sizeof(sOID_%s_%s) },\n" % ( + catName, oidName, catName, oidName, catName, oidName) +oid_table += " { kOID_NotSpecified, NULL, 0 }\n};" -oid_name_table ="{\n" +oid_name_table = "{\n" for (catName, oidName, oidEnum, oid) in oids: - oid_name_table +=" 
{ kOID_%s_%s, \"%s\" },\n" % (catName, oidName, oidName) -oid_name_table +=" { kOID_NotSpecified, NULL }\n};" + oid_name_table += " { kOID_%s_%s, \"%s\" },\n" % ( + catName, oidName, oidName) +oid_name_table += " { kOID_NotSpecified, NULL }\n};" def main(argv): - parser = optparse.OptionParser() + parser = optparse.OptionParser() + + parser.add_option('--output_file') - parser.add_option('--output_file') + options, _ = parser.parse_args(argv) - options, _ = parser.parse_args(argv) + template_args = { + 'oid_category_enums': oid_category_enums, + 'oid_enums': oid_enums, + 'oid_utf8_strings': oid_utf8_strings, + 'oid_table': oid_table, + 'oid_name_table': oid_name_table, + } - template_args = { - 'oid_category_enums': oid_category_enums, - 'oid_enums': oid_enums, - 'oid_utf8_strings': oid_utf8_strings, - 'oid_table': oid_table, - 'oid_name_table': oid_name_table, - } + with open(options.output_file, 'w') as asn1oid_file: + asn1oid_file.write(TEMPLATE % template_args) - with open(options.output_file, 'w') as asn1oid_file: - asn1oid_file.write(TEMPLATE % template_args) + return 0 - return 0 if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/src/lib/support/verhoeff/Verhoeff.py b/src/lib/support/verhoeff/Verhoeff.py index 7ceb1bcafe6b02..caa5fe8b43c9e8 100755 --- a/src/lib/support/verhoeff/Verhoeff.py +++ b/src/lib/support/verhoeff/Verhoeff.py @@ -30,94 +30,113 @@ import sys from six.moves import range -__all__ = [ 'ComputeCheckChar', 'VerifyCheckChar', - 'ComputeCheckChar16', 'VerifyCheckChar16', - 'ComputeCheckChar32', 'VerifyCheckChar32', - 'ComputeCheckChar36', 'VerifyCheckChar36' ] +__all__ = ['ComputeCheckChar', 'VerifyCheckChar', + 'ComputeCheckChar16', 'VerifyCheckChar16', + 'ComputeCheckChar32', 'VerifyCheckChar32', + 'ComputeCheckChar36', 'VerifyCheckChar36'] CharSet_Base10 = "0123456789" CharSet_Base16 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" -CharSet_Base32 = "0123456789ABCDEFGHJKLMNPRSTUVWXY" # Excludes I, O, Q and Z +CharSet_Base32 = "0123456789ABCDEFGHJKLMNPRSTUVWXY" # Excludes I, O, Q and Z CharSet_Base36 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" -PermTable_Base10 = [ 1, 5, 7, 6, 2, 8, 3, 0, 9, 4 ] -PermTable_Base16 = [ 4, 7, 5, 14, 8, 12, 15, 0, 2, 11, 3, 13, 10, 6, 9, 1 ] -PermTable_Base32 = [ 7, 2, 1, 30, 16, 20, 27, 11, 31, 6, 8, 13, 29, 5, 10, 21, 22, 3, 24, 0, 23, 25, 12, 9, 28, 14, 4, 15, 17, 18, 19, 26 ] -PermTable_Base36 = [ 29, 0, 32, 11, 35, 20, 7, 27, 2, 4, 19, 28, 30, 1, 5, 12, 3, 9, 16, 22, 6, 33, 8, 24, 26, 21, 14, 10, 34, 31, 15, 25, 17, 13, 23, 18 ] +PermTable_Base10 = [1, 5, 7, 6, 2, 8, 3, 0, 9, 4] +PermTable_Base16 = [4, 7, 5, 14, 8, 12, + 15, 0, 2, 11, 3, 13, 10, 6, 9, 1] +PermTable_Base32 = [7, 2, 1, 30, 16, 20, 27, 11, 31, 6, 8, 13, 29, 5, + 10, 21, 22, 3, 24, 0, 23, 25, 12, 9, 28, 14, 4, 15, 17, 18, 19, 26] +PermTable_Base36 = [29, 0, 32, 11, 35, 20, 7, 27, 2, 4, 19, 28, 30, 1, 5, 12, + 3, 9, 16, 22, 6, 33, 8, 24, 26, 21, 14, 10, 34, 31, 15, 25, 17, 13, 23, 18] + def DihedralMultiply(x, y, n): - n2 = n * 2 - - x = x % n2 - y = y % n2 - - if (x < n): - if (y < n): - return (x + y) % n - else: - return ((x + (y - n)) % n) + n - else: - if (y < n): - return ((n + (x - n) - y) % n) + n - else: - return (n + (x - n) - (y - n)) % n + n2 = n * 2 + + x = x % n2 + y = y % n2 + + if (x < n): + if (y < n): + return (x + y) % n + else: + return ((x + (y - n)) % n) + n + else: + if (y < n): + return ((n + (x - n) - y) % n) + n + else: + return (n + (x - n) - (y - n)) % n + def DihedralInvert(val, n): - if (val > 0 and val < n): - return n - val - else: 
- return val + if (val > 0 and val < n): + return n - val + else: + return val + def Permute(val, permTable, iterCount): - val = val % len(permTable); - if (iterCount == 0): - return val - else: - return Permute(permTable[val], permTable, iterCount - 1) + val = val % len(permTable) + if (iterCount == 0): + return val + else: + return Permute(permTable[val], permTable, iterCount - 1) + def _ComputeCheckChar(str, strLen, polygonSize, permTable, charSet): - str = str.upper() - c = 0 - for i in range(1, strLen+1): - ch = str[strLen - i] - val = charSet.index(ch) - p = Permute(val, permTable, i) - c = DihedralMultiply(c, p, polygonSize) - c = DihedralInvert(c, polygonSize) - return charSet[c] + str = str.upper() + c = 0 + for i in range(1, strLen+1): + ch = str[strLen - i] + val = charSet.index(ch) + p = Permute(val, permTable, i) + c = DihedralMultiply(c, p, polygonSize) + c = DihedralInvert(c, polygonSize) + return charSet[c] + def ComputeCheckChar(str, charSet=CharSet_Base10): - return _ComputeCheckChar(str, len(str), polygonSize=5, permTable=PermTable_Base10, charSet=charSet) + return _ComputeCheckChar(str, len(str), polygonSize=5, permTable=PermTable_Base10, charSet=charSet) + def VerifyCheckChar(str, charSet=CharSet_Base10): - expectedCheckCh = _ComputeCheckChar(str, len(str)-1, polygonSize=5, permTable=PermTable_Base10, charSet=CharSet_Base10) - return str[-1] == expectedCheckCh + expectedCheckCh = _ComputeCheckChar(str, len( + str)-1, polygonSize=5, permTable=PermTable_Base10, charSet=CharSet_Base10) + return str[-1] == expectedCheckCh + def ComputeCheckChar16(str, charSet=CharSet_Base16): - return _ComputeCheckChar(str, len(str), polygonSize=8, permTable=PermTable_Base16, charSet=charSet) + return _ComputeCheckChar(str, len(str), polygonSize=8, permTable=PermTable_Base16, charSet=charSet) + def VerifyCheckChar16(str, charSet=CharSet_Base16): - expectedCheckCh = _ComputeCheckChar(str, len(str)-1, polygonSize=8, permTable=PermTable_Base16, charSet=charSet) - return str[-1] == expectedCheckCh + expectedCheckCh = _ComputeCheckChar( + str, len(str)-1, polygonSize=8, permTable=PermTable_Base16, charSet=charSet) + return str[-1] == expectedCheckCh + def ComputeCheckChar32(str, charSet=CharSet_Base32): - return _ComputeCheckChar(str, len(str), polygonSize=16, permTable=PermTable_Base32, charSet=charSet) + return _ComputeCheckChar(str, len(str), polygonSize=16, permTable=PermTable_Base32, charSet=charSet) + def VerifyCheckChar32(str, charSet=CharSet_Base32): - expectedCheckCh = _ComputeCheckChar(str, len(str)-1, polygonSize=16, permTable=PermTable_Base32, charSet=charSet) - return str[-1] == expectedCheckCh + expectedCheckCh = _ComputeCheckChar( + str, len(str)-1, polygonSize=16, permTable=PermTable_Base32, charSet=charSet) + return str[-1] == expectedCheckCh + def ComputeCheckChar36(str, charSet=CharSet_Base36): - return _ComputeCheckChar(str, len(str), polygonSize=18, permTable=PermTable_Base36, charSet=charSet) + return _ComputeCheckChar(str, len(str), polygonSize=18, permTable=PermTable_Base36, charSet=charSet) + def VerifyCheckChar36(str, charSet=CharSet_Base36): - expectedCheckCh = _ComputeCheckChar(str, len(str)-1, polygonSize=18, permTable=PermTable_Base36, charSet=charSet) - return str[-1] == expectedCheckCh + expectedCheckCh = _ComputeCheckChar( + str, len(str)-1, polygonSize=18, permTable=PermTable_Base36, charSet=charSet) + return str[-1] == expectedCheckCh if __name__ == "__main__": - usage = """Usage: %s [ ] + usage = """Usage: %s [ ] Commands: generate @@ -125,40 +144,40 @@ def 
VerifyCheckChar36(str, charSet=CharSet_Base36): gen-multiply-table """ % (sys.argv[0]) - if (len(sys.argv) < 2): - print(usage) - elif (sys.argv[1] == "generate"): - if (len(sys.argv) < 3): - print(usage) - sys.exit(-1) - ch = ComputeCheckChar(sys.argv[2]) - print("%s%c" % (sys.argv[2], ch)) - elif (sys.argv[1] == "verify"): - if (len(sys.argv) < 3): - print(usage) - sys.exit(-1) - if (VerifyCheckChar(sys.argv[2])): - print("%s is VALID" % (sys.argv[2])) - sys.exit(0) - else: - print("%s is INVALID" % (sys.argv[2])) - sys.exit(-1) - elif (sys.argv[1] == "gen-multiply-table"): - if (len(sys.argv) < 3): - print(usage) - sys.exit(-1) - base = int(sys.argv[2]) - if (base % 2 != 0): - print("Base must be even") - sys.exit(-1); - n = base / 2 - for x in range(0, base): - sys.stdout.write(" ") - for y in range(0, base): - o = DihedralMultiply(x, y, n) - sys.stdout.write("%2d, " % o) - sys.stdout.write("\n") - else: - print(usage) - sys.exit(-1) - sys.exit(0) + if (len(sys.argv) < 2): + print(usage) + elif (sys.argv[1] == "generate"): + if (len(sys.argv) < 3): + print(usage) + sys.exit(-1) + ch = ComputeCheckChar(sys.argv[2]) + print("%s%c" % (sys.argv[2], ch)) + elif (sys.argv[1] == "verify"): + if (len(sys.argv) < 3): + print(usage) + sys.exit(-1) + if (VerifyCheckChar(sys.argv[2])): + print("%s is VALID" % (sys.argv[2])) + sys.exit(0) + else: + print("%s is INVALID" % (sys.argv[2])) + sys.exit(-1) + elif (sys.argv[1] == "gen-multiply-table"): + if (len(sys.argv) < 3): + print(usage) + sys.exit(-1) + base = int(sys.argv[2]) + if (base % 2 != 0): + print("Base must be even") + sys.exit(-1) + n = base / 2 + for x in range(0, base): + sys.stdout.write(" ") + for y in range(0, base): + o = DihedralMultiply(x, y, n) + sys.stdout.write("%2d, " % o) + sys.stdout.write("\n") + else: + print(usage) + sys.exit(-1) + sys.exit(0) diff --git a/src/test_driver/linux-cirque/InteractionModelTest.py b/src/test_driver/linux-cirque/InteractionModelTest.py index 3412de985f61bc..50c826006714a5 100755 --- a/src/test_driver/linux-cirque/InteractionModelTest.py +++ b/src/test_driver/linux-cirque/InteractionModelTest.py @@ -31,7 +31,8 @@ '%(asctime)s [%(name)s] %(levelname)s %(message)s')) logger.addHandler(sh) -CHIP_REPO = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "..") +CHIP_REPO = os.path.join(os.path.abspath( + os.path.dirname(__file__)), "..", "..", "..") DEVICE_CONFIG = { 'device0': { @@ -83,7 +84,9 @@ def run_data_model_test(self): self.execute_device_cmd(id, "CHIPCirqueDaemon.py -- run gdb -batch -return-child-result -q -ex run -ex bt {}".format( os.path.join(CHIP_REPO, "out/debug/linux_x64_gcc/chip-im-responder"))) - command = "gdb -return-child-result -q -ex run -ex bt --args "+ os.path.join(CHIP_REPO, "out/debug/linux_x64_gcc/chip-im-initiator") +" {}" + command = "gdb -return-child-result -q -ex run -ex bt --args " + \ + os.path.join( + CHIP_REPO, "out/debug/linux_x64_gcc/chip-im-initiator") + " {}" for ip in resp_ips: ret = self.execute_device_cmd( diff --git a/src/test_driver/linux-cirque/ManualTest.py b/src/test_driver/linux-cirque/ManualTest.py index 883de3f013d6f5..63e91feb2e92e3 100755 --- a/src/test_driver/linux-cirque/ManualTest.py +++ b/src/test_driver/linux-cirque/ManualTest.py @@ -46,7 +46,8 @@ ############################################################# CIRQUE_URL = "http://localhost:5000" -CHIP_REPO = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..", "..") +CHIP_REPO = os.path.join(os.path.abspath( + os.path.dirname(__file__)), "..", "..", "..") 
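
Circling back to Verhoeff.py for a moment: the CLI above (generate/verify) wraps a small API that is easy to sanity-check. Assuming Verhoeff.py is importable, and relying on the scheme's guarantee of catching every single-digit error and every adjacent transposition:

from Verhoeff import ComputeCheckChar, VerifyCheckChar

payload = "236"
code = payload + ComputeCheckChar(payload)   # append base-10 check character
assert VerifyCheckChar(code)

transposed = code[1] + code[0] + code[2:]    # swap two adjacent (distinct) digits
assert not VerifyCheckChar(transposed)
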
diff --git a/src/test_driver/linux-cirque/MobileDeviceTest.py b/src/test_driver/linux-cirque/MobileDeviceTest.py
index 2c4d303473ef98..68a60a0342e436 100755
--- a/src/test_driver/linux-cirque/MobileDeviceTest.py
+++ b/src/test_driver/linux-cirque/MobileDeviceTest.py
@@ -90,7 +90,8 @@ def run_controller_test(self):
             CHIP_REPO, "out/debug/linux_x64_gcc/controller/python/chip-0.0-cp37-abi3-linux_x86_64.whl")))

         command = "gdb -return-child-result -q -ex run -ex bt --args python3 {} -t 75 -a {}".format(
-            os.path.join(CHIP_REPO, "src/controller/python/test/test_scripts/mobile-device-test.py"),
+            os.path.join(
+                CHIP_REPO, "src/controller/python/test/test_scripts/mobile-device-test.py"),
             ethernet_ip)

         ret = self.execute_device_cmd(req_device_id, command)
diff --git a/src/test_driver/linux-cirque/OnOffClusterTest.py b/src/test_driver/linux-cirque/OnOffClusterTest.py
index 69c65395782de1..53f34a96bf3df1 100755
--- a/src/test_driver/linux-cirque/OnOffClusterTest.py
+++ b/src/test_driver/linux-cirque/OnOffClusterTest.py
@@ -85,7 +85,8 @@ def run_data_model_test(self):
                 os.path.join(CHIP_REPO, "out/debug/standalone/chip-lighting-app")))
             server_ip_address.add(self.get_device_thread_ip(device_id))

-        chip_tool_path = os.path.join(CHIP_REPO, "out/debug/standalone/chip-tool")
+        chip_tool_path = os.path.join(
+            CHIP_REPO, "out/debug/standalone/chip-tool")

         command = chip_tool_path + " onoff {} 1"
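The `command` template at the end of the OnOffClusterTest hunk is filled in later with the cluster command name; a sketch of the pattern (the "on"/"off" fill values and the trailing endpoint id follow chip-tool's `onoff <command> <endpoint>` form at the time, which is an assumption about test code this hunk does not show):

# Hedged sketch of how the template above is presumably consumed.
chip_tool_path = "out/debug/standalone/chip-tool"
command = chip_tool_path + " onoff {} 1"
on_cmd = command.format("on")    # ".../chip-tool onoff on 1"
off_cmd = command.format("off")  # ".../chip-tool onoff off 1"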
"Border router agent started.", 10)) + self.assertTrue(self.wait_for_device_output( + device_id, "CHIP:SVR: Server Listening...", 15)) # Clear default Thread network commissioning data self.logger.info("Resetting thread network on {}".format( self.get_device_pretty_id(device_id))) self.execute_device_cmd(device_id, 'ot-ctl factoryreset') - self.check_device_thread_state(device_id=device_id, expected_role="disabled", timeout=10) + self.check_device_thread_state( + device_id=device_id, expected_role="disabled", timeout=10) def check_device_thread_state(self, device_id, expected_role, timeout): if isinstance(expected_role, str): expected_role = [expected_role] - self.logger.info(f"Waiting for expected role. {self.get_device_pretty_id(device_id)}: {expected_role}") + self.logger.info( + f"Waiting for expected role. {self.get_device_pretty_id(device_id)}: {expected_role}") start = time.time() while time.time() < (start + timeout): reply = self.execute_device_cmd(device_id, 'ot-ctl state') @@ -158,7 +162,8 @@ def check_device_thread_state(self, device_id, expected_role, timeout): return time.sleep(0.5) - self.logger.error(f"Device {self.get_device_pretty_id(device_id)} does not reach expected role") + self.logger.error( + f"Device {self.get_device_pretty_id(device_id)} does not reach expected role") raise AssertionError def form_thread_network(self, device_id: str, expected_role: Union[str, List[str]], timeout: int = 15, @@ -186,10 +191,12 @@ def form_thread_network(self, device_id: str, expected_role: Union[str, List[str "ot-ctl thread start", "ot-ctl dataset active", ] - self.logger.info(f"Setting Thread dataset for {self.get_device_pretty_id(device_id)}: {dataset}") + self.logger.info( + f"Setting Thread dataset for {self.get_device_pretty_id(device_id)}: {dataset}") for cmd in ot_init_commands: self.execute_device_cmd(device_id, cmd) - self.check_device_thread_state(device_id=device_id, expected_role=expected_role, timeout=timeout) + self.check_device_thread_state( + device_id=device_id, expected_role=expected_role, timeout=timeout) def connect_to_thread_network(self): ''' @@ -200,7 +207,8 @@ def connect_to_thread_network(self): ''' self.logger.info("Running commands to form default Thread network") for device in self.thread_devices: - self.wait_for_device_output(device['id'], "Border router agent started.", 5) + self.wait_for_device_output( + device['id'], "Border router agent started.", 5) otInitCommands = [ "ot-ctl thread stop", @@ -208,7 +216,7 @@ def connect_to_thread_network(self): "ot-ctl dataset set active 0e080000000000010000000300000d35060004001fffe00208dead00beef00cafe0708fd01234567890abc051000112233445566778899aabbccddeeff030a4f70656e546872656164010212340410ad463152f9622c7297ec6c6c543a63e70c0302a0ff", "ot-ctl ifconfig up", "ot-ctl thread start", - "ot-ctl dataset active", # Emit + "ot-ctl dataset active", # Emit ] for device in self.thread_devices: # Set default openthread provisioning @@ -222,7 +230,8 @@ def connect_to_thread_network(self): # We can only check the status of ot-agent by query its state. 
diff --git a/third_party/ti_simplelink_sdk/run_sysconfig.py b/third_party/ti_simplelink_sdk/run_sysconfig.py
index 0059ffab0f371a..97ea0ea2d51b9a 100644
--- a/third_party/ti_simplelink_sdk/run_sysconfig.py
+++ b/third_party/ti_simplelink_sdk/run_sysconfig.py
@@ -16,14 +16,14 @@

 # Select OS specific helper script
 if sys.platform.startswith('linux'):
-    proc_call = [ sys.argv[1] + '/sysconfig_cli.sh' ]
+    proc_call = [sys.argv[1] + '/sysconfig_cli.sh']
 elif sys.platform.startswith('win'):
-    proc_call = [sys.argv[1] + '/sysconfig_cli.bat' ]
+    proc_call = [sys.argv[1] + '/sysconfig_cli.bat']
 else:
     # guess sh?
-    proc_call = [sys.argv[1] + '/sysconfig_cli.sh' ]
+    proc_call = [sys.argv[1] + '/sysconfig_cli.sh']

 # Append cmd options
 proc_call += sys.argv[2:]
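For context on run_sysconfig.py: argv[1] is the SysConfig installation directory, used only to pick the per-OS CLI wrapper, and every remaining argument is passed through unchanged. A hypothetical invocation (the install path and option names are illustrative, not taken from this diff):

# python run_sysconfig.py /opt/ti/sysconfig --script chip.syscfg -o generated
# On Linux the code above then yields:
#   proc_call == ['/opt/ti/sysconfig/sysconfig_cli.sh',
#                 '--script', 'chip.syscfg', '-o', 'generated']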