gcc: header-tools scripts updated to python3
Checks

Context                                        | Check   | Description
-----------------------------------------------+---------+---------------
linaro-tcwg-bot/tcwg_gcc_build--master-aarch64 | success | Testing passed
linaro-tcwg-bot/tcwg_gcc_check--master-aarch64 | success | Testing passed
linaro-tcwg-bot/tcwg_gcc_build--master-arm     | success | Testing passed
linaro-tcwg-bot/tcwg_gcc_check--master-arm     | success | Testing passed
Commit Message
From: Sundeep KOKKONDA <sundeep.kokkonda@windriver.com>
The scripts in contrib/header-tools/ break with python3. These scripts are updated for python3 compatibility.
Signed-off-by: Sundeep KOKKONDA <sundeep.kokkonda@windriver.com>
---
contrib/header-tools/count-headers | 8 +-
contrib/header-tools/gcc-order-headers | 64 ++++++++--------
contrib/header-tools/graph-header-logs | 42 +++++-----
contrib/header-tools/graph-include-web | 26 +++----
contrib/header-tools/headerutils.py | 36 ++++-----
contrib/header-tools/included-by | 32 ++++----
contrib/header-tools/reduce-headers | 102 ++++++++++++-------------
contrib/header-tools/replace-header | 12 +--
contrib/header-tools/show-headers | 32 ++++----
9 files changed, 177 insertions(+), 177 deletions(-)
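
The changes in the diff are mechanical Python-2-to-3 conversions: the print statement becomes the print() function, the Queue module is imported under its Python 3 name queue, and the ur"" raw-unicode string prefix (a syntax error in Python 3) becomes plain r"". The short sketch below is illustrative only and is not taken from the patch; the show() helper and the sample line are made up to demonstrate the three constructs in their Python 3 form:

    # Illustrative sketch only (not from the patch): the three Python 2
    # constructs the header-tools scripts relied on, in Python 3 form.
    import queue   # Python 2 named this module "Queue"
    import re

    def show(line):
        # print statement -> print() function
        print("scanning: " + line)
        # ur"..." raw-unicode prefix (invalid in Python 3) -> r"..."
        inc = re.findall(r"^\s*#\s*include\s*\"(.+?)\"", line)
        if inc:
            print("includes " + inc[0])

    show('#include "tree.h"')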
Comments
Can I get an update on this patch? Is this patch OK? Will it be taken
into the upstream sources?
On 29-Mar-24 15:52, sundeep.kokkonda@windriver.com wrote:
> From: Sundeep KOKKONDA <sundeep.kokkonda@windriver.com>
>
> The scripts in contrib/header-tools/ break with python3. These scripts are updated for python3 compatibility.
>
> Signed-off-by: Sundeep KOKKONDA <sundeep.kokkonda@windriver.com>
> ---
> contrib/header-tools/count-headers | 8 +-
> contrib/header-tools/gcc-order-headers | 64 ++++++++--------
> contrib/header-tools/graph-header-logs | 42 +++++-----
> contrib/header-tools/graph-include-web | 26 +++----
> contrib/header-tools/headerutils.py | 36 ++++-----
> contrib/header-tools/included-by | 32 ++++----
> contrib/header-tools/reduce-headers | 102 ++++++++++++-------------
> contrib/header-tools/replace-header | 12 +--
> contrib/header-tools/show-headers | 32 ++++----
> 9 files changed, 177 insertions(+), 177 deletions(-)
>
> diff --git a/contrib/header-tools/count-headers b/contrib/header-tools/count-headers
> index 7a92596a602..9e7046a2a1e 100755
> --- a/contrib/header-tools/count-headers
> +++ b/contrib/header-tools/count-headers
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -45,11 +45,11 @@ if not usage and len (src) > 0:
> l.sort (key=lambda tup:tup[0], reverse=True)
>
> for f in l:
> - print str (f[0]) + " : " + f[1]
> + print(str (f[0]) + " : " + f[1])
>
> else:
> - print "count-headers file1 [filen]"
> - print "Count the number of occurrences of all includes across all listed files"
> + print("count-headers file1 [filen]")
> + print("Count the number of occurrences of all includes across all listed files")
>
>
>
> diff --git a/contrib/header-tools/gcc-order-headers b/contrib/header-tools/gcc-order-headers
> index ee76cba4b18..8d2e3cf6e77 100755
> --- a/contrib/header-tools/gcc-order-headers
> +++ b/contrib/header-tools/gcc-order-headers
> @@ -1,11 +1,11 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os
> import sys
> import shlex
> import re
>
> from headerutils import *
> -import Queue
> +import queue
>
> file_list = list ()
> usage = False
> @@ -77,19 +77,19 @@ def create_master_list (fn, verbose):
> if fn != "diagnostic.h" and fn != "diagnostic-core.h":
> master_list.append (fn)
> if (verbose):
> - print fn + " included by: " + includes[fn][0]
> + print(fn + " included by: " + includes[fn][0])
>
>
>
> def print_dups ():
> if dups:
> - print "\nduplicated includes"
> + print("\nduplicated includes")
> for i in dups:
> string = "dup : " + i + " : "
> string += includes[i][0]
> for i2 in dups[i]:
> string += ", "+i2
> - print string
> + print(string)
>
>
> def process_known_dups ():
> @@ -230,11 +230,11 @@ for arg in sys.argv[1:]:
> elif arg[0:2] == "-v":
> show_master = True
> else:
> - print "Error: unrecognized option " + arg
> + print("Error: unrecognized option " + arg)
> elif os.path.exists(arg):
> file_list.append (arg)
> else:
> - print "Error: file " + arg + " Does not exist."
> + print("Error: file " + arg + " Does not exist.")
> usage = True
>
> if not file_list and not show_master:
> @@ -242,7 +242,7 @@ if not file_list and not show_master:
>
> if not usage and not os.path.exists ("coretypes.h"):
> usage = True
> - print "Error: Must run command in main gcc source directory containing coretypes.h\n"
> + print("Error: Must run command in main gcc source directory containing coretypes.h\n")
>
> # process diagnostic.h first.. it's special since GCC_DIAG_STYLE can be
> # overridden by languages, but must be done so by a file included BEFORE it.
> @@ -268,20 +268,20 @@ process_known_dups ()
> desired_order = master_list
>
> if show_master:
> - print " Canonical order of gcc include files: "
> + print(" Canonical order of gcc include files: ")
> for x in master_list:
> - print x
> - print " "
> + print(x)
> + print(" ")
>
> if usage:
> - print "gcc-order-headers [-i] [-v] file1 [filen]"
> - print " Ensures gcc's headers files are included in a normalized form with"
> - print " redundant headers removed. The original files are saved in filename.bak"
> - print " Outputs a list of files which changed."
> - print " -i ignore conditional compilation."
> - print " Use after examining the file to be sure includes within #ifs are safe"
> - print " Any headers within conditional sections will be ignored."
> - print " -v Show the canonical order of known headers"
> + print("gcc-order-headers [-i] [-v] file1 [filen]")
> + print(" Ensures gcc's headers files are included in a normalized form with")
> + print(" redundant headers removed. The original files are saved in filename.bak")
> + print(" Outputs a list of files which changed.")
> + print(" -i ignore conditional compilation.")
> + print(" Use after examining the file to be sure includes within #ifs are safe")
> + print(" Any headers within conditional sections will be ignored.")
> + print(" -v Show the canonical order of known headers")
> sys.exit(0)
>
>
> @@ -303,7 +303,7 @@ for fn in file_list:
>
> if ii_include_list_cond (iinfo):
> if not ignore_conditional:
> - print fn + ": Cannot process due to conditional compilation of includes"
> + print(fn + ": Cannot process due to conditional compilation of includes")
> didnt_do.append (fn)
> src = list ()
>
> @@ -329,8 +329,8 @@ for fn in file_list:
> src_line[nm] = ii_src_line(iinfo)[d]
> if src_line[nm].find("/*") != -1 and src_line[nm].find("*/") == -1:
> # this means we have a multi line comment, abort!'
> - print fn + ": Cannot process due to a multi-line comment :"
> - print " " + src_line[nm]
> + print(fn + ": Cannot process due to a multi-line comment :")
> + print(" " + src_line[nm])
> if fn not in didnt_do:
> didnt_do.append (fn)
> src = list ()
> @@ -375,22 +375,22 @@ for fn in file_list:
> for line in new_src:
> fl.write (line)
> fl.close ()
> - print fn
> + print(fn )
>
>
> if didnt_do:
> - print "\n\n Did not process the following files due to conditional dependencies:"
> + print("\n\n Did not process the following files due to conditional dependencies:")
> str = ""
> for x in didnt_do:
> str += x + " "
> - print str
> - print "\n"
> - print "Please examine to see if they are safe to process, and re-try with -i. "
> - print "Safeness is determined by checking whether any of the reordered headers are"
> - print "within a conditional and could be hauled out of the conditional, thus changing"
> - print "what the compiler will see."
> - print "Multi-line comments after a #include can also cause failuer, they must be turned"
> - print "into single line comments or removed."
> + print(str)
> + print("\n")
> + print("Please examine to see if they are safe to process, and re-try with -i. ")
> + print("Safeness is determined by checking whether any of the reordered headers are")
> + print("within a conditional and could be hauled out of the conditional, thus changing")
> + print("what the compiler will see.")
> + print("Multi-line comments after a #include can also cause failuer, they must be turned")
> + print("into single line comments or removed.")
>
>
>
> diff --git a/contrib/header-tools/graph-header-logs b/contrib/header-tools/graph-header-logs
> index e537aaeac0c..b4038f57628 100755
> --- a/contrib/header-tools/graph-header-logs
> +++ b/contrib/header-tools/graph-header-logs
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -53,7 +53,7 @@ ignore = [ "coretypes_h",
>
> def process_log_file (header, logfile):
> if header_roots.get (header) != None:
> - print "Error: already processed log file: " + header + ".log"
> + print("Error: already processed log file: " + header + ".log")
> return
> hname = pretty_name (header)
> header_roots[hname] = { }
> @@ -66,7 +66,7 @@ def process_log_file (header, logfile):
> if newinc:
> incfrom = list()
> newinc = False
> - fn = re.findall(ur".*/(.*?):", line)
> + fn = re.findall(r".*/(.*?):", line)
> if len(fn) != 1:
> continue
> if fn[0][-2:] != ".h":
> @@ -76,16 +76,16 @@ def process_log_file (header, logfile):
> incfrom.append (n)
> continue
> newinc = True
> - note = re.findall (ur"^.*note: (.*)", line)
> + note = re.findall (r"^.*note: (.*)", line)
> if len(note) > 0:
> sline.append (("note", note[0]))
> else:
> - err_msg = re.findall (ur"^.*: error: (.*)", line)
> + err_msg = re.findall (r"^.*: error: (.*)", line)
> if len(err_msg) == 1:
> msg = err_msg[0]
> if (len (re.findall("error: forward declaration", line))) != 0:
> continue
> - path = re.findall (ur"^(.*?):.*error: ", line)
> + path = re.findall (r"^(.*?):.*error: ", line)
> if len(path) != 1:
> continue
> if path[0][-2:] != ".h":
> @@ -95,7 +95,7 @@ def process_log_file (header, logfile):
> continue
> sline.append (("error", msg, fname, incfrom))
>
> - print str(len(sline)) + " lines to process"
> + print(str(len(sline)) + " lines to process")
> lastline = "note"
> for line in sline:
> if line[0] != "note" and lastline[0] == "error":
> @@ -111,7 +111,7 @@ def process_log_file (header, logfile):
> if ee not in extra_edges:
> extra_edges.append (ee)
> fname = t
> - print string
> + print(string)
>
> if hname not in nodes:
> nodes.append(hname)
> @@ -125,7 +125,7 @@ def process_log_file (header, logfile):
> if header_roots[hname].get(fname) == None:
> header_roots[hname][fname] = list()
> if msg not in header_roots[hname][fname]:
> - print string + ofname + " : " +msg
> + print(string + ofname + " : " +msg)
> header_roots[hname][fname].append (msg)
> lastline = line;
>
> @@ -181,10 +181,10 @@ def build_dot_file (file_list):
> if verbose:
> depcount.sort(key=lambda tup:tup[2])
> for x in depcount:
> - print " ("+str(x[2])+ ") : " + x[0] + " -> " + x[1]
> + print(" ("+str(x[2])+ ") : " + x[0] + " -> " + x[1])
> if (x[2] <= verbosity):
> for l in header_roots[x[0]][x[1]]:
> - print " " + l
> + print(" " + l)
>
> output.write ("}\n");
>
> @@ -205,7 +205,7 @@ for arg in sys.argv[1:]:
> if (verbosity == 9):
> verbosity = 9999
> elif arg[0:1] == "-":
> - print "Unrecognized option " + arg
> + print("Unrecognized option " + arg)
> dohelp = True
> else:
> files.append (arg)
> @@ -214,16 +214,16 @@ if len(sys.argv) == 1:
> dohelp = True
>
> if dohelp:
> - print "Parses the log files from the reduce-headers tool to generate"
> - print "dependency graphs for the include web for specified files."
> - print "Usage: [-nnum] [-h] [-v[n]] [-ooutput] file1 [[file2] ... [filen]]"
> - print " -ooutput : Specifies output to output.dot and output.png"
> - print " Defaults to 'graph.dot and graph.png"
> - print " -vn : verbose mode, shows the number of connections, and if n"
> - print " is specified, show the messages if # < n. 9 is infinity"
> - print " -h : help"
> + print("Parses the log files from the reduce-headers tool to generate")
> + print("dependency graphs for the include web for specified files.")
> + print("Usage: [-nnum] [-h] [-v[n]] [-ooutput] file1 [[file2] ... [filen]]")
> + print(" -ooutput : Specifies output to output.dot and output.png")
> + print(" Defaults to 'graph.dot and graph.png")
> + print(" -vn : verbose mode, shows the number of connections, and if n")
> + print(" is specified, show the messages if # < n. 9 is infinity")
> + print(" -h : help")
> else:
> - print files
> + print(files)
> build_dot_file (files)
> os.system ("dot -Tpng " + dotname + " -o" + graphname)
>
> diff --git a/contrib/header-tools/graph-include-web b/contrib/header-tools/graph-include-web
> index 47576a177de..dc2813bf1aa 100755
> --- a/contrib/header-tools/graph-include-web
> +++ b/contrib/header-tools/graph-include-web
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -82,7 +82,7 @@ for arg in sys.argv[1:]:
> noterm = True
> elif arg[0:2] == "-f":
> if not os.path.exists (arg[2:]):
> - print "Option " + arg +" doesn't specify a proper file"
> + print("Option " + arg +" doesn't specify a proper file")
> dohelp = True
> else:
> sfile = open (arg[2:], "r")
> @@ -93,7 +93,7 @@ for arg in sys.argv[1:]:
> elif arg[0:2] == "-n":
> edge_thresh = int (arg[2:])
> elif arg[0:1] == "-":
> - print "Unrecognized option " + arg
> + print("Unrecognized option " + arg)
> dohelp = True
> else:
> files.append (arg)
> @@ -102,17 +102,17 @@ if len(sys.argv) == 1:
> dohelp = True
>
> if dohelp:
> - print "Generates a graph of the include web for specified files."
> - print "Usage: [-finput_file] [-h] [-ooutput] [file1 ... [filen]]"
> - print " -finput_file : Input file containing a list of files to process."
> - print " -ooutput : Specifies output to output.dot and output.png."
> - print " defaults to graph.dot and graph.png."
> - print " -nnum : Specifies the # of edges beyond which sfdp is invoked. def=0."
> - print " -a : Aggregate all .c files to 1 file. Shows only include web."
> - print " -at : Aggregate, but don't include terminal.h to .c links."
> - print " -h : Print this help."
> + print("Generates a graph of the include web for specified files.")
> + print("Usage: [-finput_file] [-h] [-ooutput] [file1 ... [filen]]")
> + print(" -finput_file : Input file containing a list of files to process.")
> + print(" -ooutput : Specifies output to output.dot and output.png.")
> + print(" defaults to graph.dot and graph.png.")
> + print(" -nnum : Specifies the # of edges beyond which sfdp is invoked. def=0.")
> + print(" -a : Aggregate all .c files to 1 file. Shows only include web.")
> + print(" -at : Aggregate, but don't include terminal.h to .c links.")
> + print(" -h : Print this help.")
> else:
> - print files
> + print(files)
> build_dot_file (files)
> if edges > edge_thresh:
> os.system ("sfdp -Tpng " + dotname + " -o" + graphname)
> diff --git a/contrib/header-tools/headerutils.py b/contrib/header-tools/headerutils.py
> index 95c47fb4b69..3f87b8bd4ab 100755
> --- a/contrib/header-tools/headerutils.py
> +++ b/contrib/header-tools/headerutils.py
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -10,7 +10,7 @@ import pickle
> import multiprocessing
>
> def find_pound_include (line, use_outside, use_slash):
> - inc = re.findall (ur"^\s*#\s*include\s*\"(.+?)\"", line)
> + inc = re.findall (r"^\s*#\s*include\s*\"(.+?)\"", line)
> if len(inc) == 1:
> nm = inc[0]
> if use_outside or os.path.exists (nm):
> @@ -19,17 +19,17 @@ def find_pound_include (line, use_outside, use_slash):
> return ""
>
> def find_system_include (line):
> - inc = re.findall (ur"^\s*#\s*include\s*<(.+?)>", line)
> + inc = re.findall (r"^\s*#\s*include\s*<(.+?)>", line)
> if len(inc) == 1:
> return inc[0]
> return ""
>
> def find_pound_define (line):
> - inc = re.findall (ur"^\s*#\s*define ([A-Za-z0-9_]+)", line)
> + inc = re.findall (r"^\s*#\s*define ([A-Za-z0-9_]+)", line)
> if len(inc) != 0:
> if len(inc) > 1:
> - print "What? more than 1 match in #define??"
> - print inc
> + print("What? more than 1 match in #define??")
> + print(inc)
> sys.exit(5)
> return inc[0];
> return ""
> @@ -49,26 +49,26 @@ def is_pound_endif (line):
> return False
>
> def find_pound_if (line):
> - inc = re.findall (ur"^\s*#\s*if\s+(.*)", line)
> + inc = re.findall (r"^\s*#\s*if\s+(.*)", line)
> if len(inc) == 0:
> - inc = re.findall (ur"^\s*#\s*elif\s+(.*)", line)
> + inc = re.findall (r"^\s*#\s*elif\s+(.*)", line)
> if len(inc) > 0:
> - inc2 = re.findall (ur"defined\s*\((.+?)\)", inc[0])
> - inc3 = re.findall (ur"defined\s+([a-zA-Z0-9_]+)", inc[0])
> + inc2 = re.findall (r"defined\s*\((.+?)\)", inc[0])
> + inc3 = re.findall (r"defined\s+([a-zA-Z0-9_]+)", inc[0])
> for yy in inc3:
> inc2.append (yy)
> return inc2
> else:
> - inc = re.findall (ur"^\s*#\s*ifdef\s(.*)", line)
> + inc = re.findall (r"^\s*#\s*ifdef\s(.*)", line)
> if len(inc) == 0:
> - inc = re.findall (ur"^\s*#\s*ifndef\s(.*)", line)
> + inc = re.findall (r"^\s*#\s*ifndef\s(.*)", line)
> if len(inc) > 0:
> inc2 = re.findall ("[A-Za-z_][A-Za-z_0-9]*", inc[0])
> return inc2
> if len(inc) == 0:
> return list ()
> - print "WTF. more than one line returned for find_pound_if"
> - print inc
> + print("WTF. more than one line returned for find_pound_if")
> + print(inc)
> sys.exit(5)
>
>
> @@ -248,8 +248,8 @@ def find_gcc_bld_dir (path):
> for y in files:
> p = os.path.dirname (y)
> if os.path.basename (p) == "gcc":
> - blddir = p
> - break
> + blddir = p
> + break
>
> return blddir
>
> @@ -424,7 +424,7 @@ def find_replace_include (find, replace, src):
> # pass in a require and provide dictionary to be read in.
> def read_require_provides (require, provide):
> if not os.path.exists ("require-provide.master"):
> - print "require-provide.master file is not available. please run data collection."
> + print("require-provide.master file is not available. please run data collection.")
> sys.exit(1)
> incl_list = open("require-provide.master").read().splitlines()
> for f in incl_list:
> @@ -501,7 +501,7 @@ def spawn_makes (command_list):
> c = subprocess.Popen(command, bufsize=-1, stdout=devnull, stderr=subprocess.PIPE, shell=True)
> proc_res.append ((c, tname))
>
> - print text[:-2]
> + print(text[:-2])
>
> for p in proc_res:
> output = p[0].communicate()
> diff --git a/contrib/header-tools/included-by b/contrib/header-tools/included-by
> index 9947fee6b2b..505b6fa91c2 100755
> --- a/contrib/header-tools/included-by
> +++ b/contrib/header-tools/included-by
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -43,7 +43,7 @@ for x in sys.argv[1:]:
> file_list = open (x[2:]).read().splitlines()
> scanfiles = False
> elif x[0] == "-":
> - print "Error: Unknown option " + x
> + print("Error: Unknown option " + x)
> usage = True
> else:
> src.append (x)
> @@ -89,21 +89,21 @@ if not usage:
> if len (careabout) < num_match:
> output = ""
> if output != "":
> - print output
> + print(output)
> else:
> - print "included-by [-h] [-i] [-c] [-v] [-a] [-nx] file1 [file2] ... [filen]"
> - print "find the list of all files in subdirectories that include any of "
> - print "the listed files. processed to a depth of 3 subdirs"
> - print " -h : Show this message"
> - print " -i : process only header files (*.h) for #include"
> - print " -c : process only source files (*.c *.cc) for #include"
> - print " If nothing is specified, defaults to -i -c"
> - print " -s : Same as -c."
> - print " -v : Show which include(s) were found"
> - print " -nx : Only list files which have at least x different matches. Default = 1"
> - print " -a : Show only files which all listed files are included"
> - print " This is equivilent to -nT where T == # of items in list"
> - print " -flistfile : Show only files contained in the list of files"
> + print("included-by [-h] [-i] [-c] [-v] [-a] [-nx] file1 [file2] ... [filen]")
> + print("find the list of all files in subdirectories that include any of ")
> + print("the listed files. processed to a depth of 3 subdirs")
> + print(" -h : Show this message")
> + print(" -i : process only header files (*.h) for #include")
> + print(" -c : process only source files (*.c *.cc) for #include")
> + print(" If nothing is specified, defaults to -i -c")
> + print(" -s : Same as -c.")
> + print(" -v : Show which include(s) were found")
> + print(" -nx : Only list files which have at least x different matches. Default = 1")
> + print(" -a : Show only files which all listed files are included")
> + print(" This is equivilent to -nT where T == # of items in list")
> + print(" -flistfile : Show only files contained in the list of files")
>
>
>
> diff --git a/contrib/header-tools/reduce-headers b/contrib/header-tools/reduce-headers
> index 7d236e30688..e1c809ad904 100755
> --- a/contrib/header-tools/reduce-headers
> +++ b/contrib/header-tools/reduce-headers
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -177,7 +177,7 @@ def build_target_dict (bld_dir, just_these):
> target = t.strip()
> tpath = bld_dir + "/" + target
> if not os.path.exists (tpath + "/gcc"):
> - print "Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc"
> + print("Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc")
> error = True
> else:
> target_dict[target] = tpath
> @@ -209,7 +209,7 @@ def find_targets (src_file):
> targ_list = list()
> obj_name = get_obj_name (src_file)
> if not obj_name:
> - print "Error: " + src_file + " - Cannot determine object name."
> + print("Error: " + src_file + " - Cannot determine object name.")
> return list()
>
> # Put the high priority targets which tend to trigger failures first
> @@ -263,22 +263,22 @@ def try_to_remove (src_file, h_list, verbose):
> hostbuild = True
> if not target_dict:
> summary = src_file + ": Target builds are required for config files. None found."
> - print summary
> + print(summary)
> return summary
> if not targ_list:
> summary =src_file + ": Cannot find any targets which build this file."
> - print summary
> + print(summary)
> return summary
>
> if hostbuild:
> # confirm it actually builds before we do anything
> - print "Confirming source file builds"
> + print("Confirming source file builds")
> res = get_make_output (build_dir + "/gcc", "all")
> if res[0] != 0:
> message = "Error: " + src_file + " does not build currently."
> summary = src_file + " does not build on host."
> - print message
> - print res[1]
> + print(message)
> + print(res[1])
> if verbose:
> verbose.write (message + "\n")
> verbose.write (res[1]+ "\n")
> @@ -314,7 +314,7 @@ def try_to_remove (src_file, h_list, verbose):
> lookfor = ii_src_line(src_info)[inc_file]
> src_tmp.remove (lookfor)
> message = "Trying " + src_file + " without " + inc_file
> - print message
> + print(message)
> if verbose:
> verbose.write (message + "\n")
> out = open(src_file, "w")
> @@ -350,7 +350,7 @@ def try_to_remove (src_file, h_list, verbose):
> message = "Passed host and target builds"
>
> if keep:
> - print message + "\n"
> + print(message + "\n")
>
> if (rc != 0):
> if verbose:
> @@ -392,7 +392,7 @@ def try_to_remove (src_file, h_list, verbose):
> if ii_path(iinfo) == "build" and not target_dict:
> keep = True
> text = message + " : Will not remove a build file without some targets."
> - print text
> + print(text)
> ilog = open(src_file+".log","a")
> ilog.write (text +"\n")
> ilog.write ("============================================\n");
> @@ -414,7 +414,7 @@ def try_to_remove (src_file, h_list, verbose):
> text = message + ", but must keep " + inc_file + " because it provides " + dep
> if because.get(dep) != None:
> text = text + " Possibly required by " + because[dep]
> - print text
> + print(text)
> ilog = open(inc_file+".log","a")
> ilog.write (because[dep]+": Requires [dep] in "+src_file+"\n")
> ilog.write ("============================================\n");
> @@ -444,7 +444,7 @@ def try_to_remove (src_file, h_list, verbose):
>
> src_tmp = copy.deepcopy (src_data)
> else:
> - print message + " --> removing " + inc_file + "\n"
> + print(message + " --> removing " + inc_file + "\n")
> rmcount += 1
> if verbose:
> verbose.write (message + " --> removing " + inc_file + "\n")
> @@ -454,7 +454,7 @@ def try_to_remove (src_file, h_list, verbose):
> remove_count[inc_file] += 1
> src_data = copy.deepcopy (src_tmp)
> except:
> - print "Interuption: restoring original file"
> + print("Interuption: restoring original file")
> out = open(src_file, "w")
> for line in src_orig:
> out.write (line)
> @@ -473,8 +473,8 @@ def try_to_remove (src_file, h_list, verbose):
> rc = res[0]
> if (rc != 0):
> # host build failed! return to original version
> - print "Error: " + src_file + " Failed to bootstrap at end!!! restoring."
> - print " Bad version at " + src_file + ".bad"
> + print("Error: " + src_file + " Failed to bootstrap at end!!! restoring.")
> + print(" Bad version at " + src_file + ".bad")
> os.rename (src_file, src_file + ".bad")
> out = open(src_file, "w")
> for line in src_orig:
> @@ -486,7 +486,7 @@ def try_to_remove (src_file, h_list, verbose):
> summary = src_file + ": No change."
> else:
> summary = src_file + ": Reduction performed, "+str(rmcount)+" includes removed."
> - print summary
> + print(summary)
> return summary
>
> only_h = list ()
> @@ -513,11 +513,11 @@ for x in sys.argv[1:]:
> elif x[0:2] == "-t":
> target_dir = x[2:]
> elif x[0] == "-":
> - print "Error: Unrecognized option " + x
> + print("Error: Unrecognized option " + x)
> usgae = True
> else:
> if not os.path.exists (x):
> - print "Error: specified file " + x + " does not exist."
> + print("Error: specified file " + x + " does not exist.")
> usage = True
> else:
> src.append (x)
> @@ -526,50 +526,50 @@ if target_dir:
> build_target_dict (target_dir, only_targs)
>
> if build_dir == "" and target_dir == "":
> - print "Error: Must specify a build directory, and/or a target directory."
> + print("Error: Must specify a build directory, and/or a target directory.")
> usage = True
>
> if build_dir and not os.path.exists (build_dir):
> - print "Error: specified build directory does not exist : " + build_dir
> + print("Error: specified build directory does not exist : " + build_dir)
> usage = True
>
> if target_dir and not os.path.exists (target_dir):
> - print "Error: specified target directory does not exist : " + target_dir
> + print("Error: specified target directory does not exist : " + target_dir)
> usage = True
>
> if usage:
> - print "Attempts to remove extraneous include files from source files."
> - print " "
> - print "Should be run from the main gcc source directory, and works on a target"
> - print "directory, as we attempt to make the 'all' target."
> - print " "
> - print "By default, gcc-reorder-includes is run on each file before attempting"
> - print "to remove includes. this removes duplicates and puts some headers in a"
> - print "canonical ordering"
> - print " "
> - print "The build directory should be ready to compile via make. Time is saved"
> - print "if the build is already complete, so that only changes need to be built."
> - print " "
> - print "Usage: [options] file1.c [file2.c] ... [filen.c]"
> - print " -bdir : the root build directory to attempt buiding .o files."
> - print " -tdir : the target build directory"
> - print " -d : Ignore conditional macro dependencies."
> - print " "
> - print " -Dmacro : Ignore a specific macro for dependencies"
> - print " -Ttarget : Only consider target in target directory."
> - print " -fheader : Specifies a specific .h file to be considered."
> - print " "
> - print " -D, -T, and -f can be specified mulitple times and are aggregated."
> - print " "
> - print " The original file will be in filen.bak"
> - print " "
> + print("Attempts to remove extraneous include files from source files.")
> + print(" ")
> + print("Should be run from the main gcc source directory, and works on a target")
> + print("directory, as we attempt to make the 'all' target.")
> + print(" ")
> + print("By default, gcc-reorder-includes is run on each file before attempting")
> + print("to remove includes. this removes duplicates and puts some headers in a")
> + print("canonical ordering")
> + print(" ")
> + print("The build directory should be ready to compile via make. Time is saved")
> + print("if the build is already complete, so that only changes need to be built.")
> + print(" ")
> + print("Usage: [options] file1.c [file2.c] ... [filen.c]")
> + print(" -bdir : the root build directory to attempt buiding .o files.")
> + print(" -tdir : the target build directory")
> + print(" -d : Ignore conditional macro dependencies.")
> + print(" ")
> + print(" -Dmacro : Ignore a specific macro for dependencies")
> + print(" -Ttarget : Only consider target in target directory.")
> + print(" -fheader : Specifies a specific .h file to be considered.")
> + print(" ")
> + print(" -D, -T, and -f can be specified mulitple times and are aggregated.")
> + print(" ")
> + print(" The original file will be in filen.bak")
> + print(" ")
> sys.exit (0)
>
> if only_h:
> - print "Attempting to remove only these files:"
> + print("Attempting to remove only these files:")
> for x in only_h:
> - print x
> - print " "
> + print(x)
> + print(" ")
>
> logfile = open("reduce-headers.log","w")
>
> @@ -583,7 +583,7 @@ ilog = open("reduce-headers.sum","a")
> ilog.write ("===============================================================\n")
> for x in remove_count:
> msg = x + ": Removed " + str(remove_count[x]) + " times."
> - print msg
> + print(msg)
> logfile.write (msg + "\n")
> ilog.write (msg + "\n")
>
> diff --git a/contrib/header-tools/replace-header b/contrib/header-tools/replace-header
> index ce20096a453..fd07d2c88e5 100755
> --- a/contrib/header-tools/replace-header
> +++ b/contrib/header-tools/replace-header
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -20,7 +20,7 @@ for x in sys.argv[1:]:
> elif x[0:2] == "-r":
> replace.append (x[2:])
> elif x[0:1] == "-":
> - print "Error: unrecognized option " + x
> + print("Error: unrecognized option " + x)
> usage = True
> else:
> files.append (x)
> @@ -29,25 +29,25 @@ if find == "":
> usage = True
>
> if usage:
> - print "replace-header -fheader -rheader [-rheader] file1 [filen.]"
> + print("replace-header -fheader -rheader [-rheader] file1 [filen.]")
> sys.exit(0)
>
> string = ""
> for x in replace:
> string = string + " '"+x+"'"
> -print "Replacing '"+find+"' with"+string
> +print("Replacing '"+find+"' with"+string)
>
> for x in files:
> src = readwholefile (x)
> src = find_replace_include (find, replace, src)
> if (len(src) > 0):
> - print x + ": Changed"
> + print(x + ": Changed")
> out = open(x, "w")
> for line in src:
> out.write (line);
> out.close ()
> else:
> - print x
> + print(x)
>
>
>
> diff --git a/contrib/header-tools/show-headers b/contrib/header-tools/show-headers
> index cb949ec1f44..86eceec71b6 100755
> --- a/contrib/header-tools/show-headers
> +++ b/contrib/header-tools/show-headers
> @@ -1,4 +1,4 @@
> -#! /usr/bin/python2
> +#! /usr/bin/python3
> import os.path
> import sys
> import shlex
> @@ -93,17 +93,17 @@ for x in sys.argv[1:]:
> if len(src) != 1:
> usage = True
> elif not os.path.exists (src[0]):
> - print src[0] + ": Requested source file does not exist.\n"
> + print(src[0] + ": Requested source file does not exist.\n")
> usage = True
>
> if usage:
> - print "show-headers [-idir] [-sfilen] file1 "
> - print " "
> - print " Show a hierarchical visual format how many times each header file"
> - print " is included in a source file. Should be run from the source directory"
> - print " files from find-include-depends"
> - print " -s : search for a header, and point it out."
> - print " -i : Specifies additonal directories to search for includes."
> + print("show-headers [-idir] [-sfilen] file1 ")
> + print(" ")
> + print(" Show a hierarchical visual format how many times each header file")
> + print(" is included in a source file. Should be run from the source directory")
> + print(" files from find-include-depends")
> + print(" -s : search for a header, and point it out.")
> + print(" -i : Specifies additonal directories to search for includes.")
> sys.exit(0)
>
>
> @@ -114,10 +114,10 @@ if extradir:
> blddir = find_gcc_bld_dir ("../..")
>
> if blddir:
> - print "Using build directory: " + blddir
> + print("Using build directory: " + blddir)
> incl_dirs.insert (0, blddir)
> else:
> - print "Could not find a build directory, better results if you specify one with -i"
> + print("Could not find a build directory, better results if you specify one with -i")
>
> # search path is now ".", blddir, extradirs_from_-i, built_in_incl_dirs
> incl_dirs.insert (0, ".")
> @@ -137,15 +137,15 @@ for line in data:
> if d and d[-2:] == ".h":
> process_include (d, 1)
>
> -print "\n" + x
> +print("\n" + x)
> for line in output:
> - print line
> + print(line)
>
> if highlight:
> - print " "
> + print(" ")
> for h in summary:
> - print h + " is included by source file."
> + print(h + " is included by source file.")
> for h in highlight:
> if h not in summary:
> - print h + " is not included by source file."
> + print(h + " is not included by source file.")
>
Reminder-2: Can I get an update on this patch? Is this patch OK? Will
it be taken into the upstream sources?
On 02-Apr-24 11:28, Sundeep KOKKONDA wrote:
> Can I get some update on this patch. Is this patch ok? Will it be
> taken to upstream sources?
>> - print " -d : Ignore conditional macro dependencies."
>> - print " "
>> - print " -Dmacro : Ignore a specific macro for dependencies"
>> - print " -Ttarget : Only consider target in target directory."
>> - print " -fheader : Specifies a specific .h file to be
>> considered."
>> - print " "
>> - print " -D, -T, and -f can be specified mulitple times and
>> are aggregated."
>> - print " "
>> - print " The original file will be in filen.bak"
>> - print " "
>> + print("Attempts to remove extraneous include files from source
>> files.")
>> + print(" ")
>> + print("Should be run from the main gcc source directory, and works
>> on a target")
>> + print("directory, as we attempt to make the 'all' target.")
>> + print(" ")
>> + print("By default, gcc-reorder-includes is run on each file before
>> attempting")
>> + print("to remove includes. this removes duplicates and puts some
>> headers in a")
>> + print("canonical ordering")
>> + print(" ")
>> + print("The build directory should be ready to compile via make.
>> Time is saved")
>> + print("if the build is already complete, so that only changes need
>> to be built.")
>> + print(" ")
>> + print("Usage: [options] file1.c [file2.c] ... [filen.c]")
>> + print(" -bdir : the root build directory to attempt
>> buiding .o files.")
>> + print(" -tdir : the target build directory")
>> + print(" -d : Ignore conditional macro dependencies.")
>> + print(" ")
>> + print(" -Dmacro : Ignore a specific macro for dependencies")
>> + print(" -Ttarget : Only consider target in target directory.")
>> + print(" -fheader : Specifies a specific .h file to be
>> considered.")
>> + print(" ")
>> + print(" -D, -T, and -f can be specified mulitple times and
>> are aggregated.")
>> + print(" ")
>> + print(" The original file will be in filen.bak")
>> + print(" ")
>> sys.exit (0)
>> if only_h:
>> - print "Attempting to remove only these files:"
>> + print("Attempting to remove only these files:")
>> for x in only_h:
>> - print x
>> - print " "
>> + print(x)
>> + print(" ")
>> logfile = open("reduce-headers.log","w")
>> @@ -583,7 +583,7 @@ ilog = open("reduce-headers.sum","a")
>> ilog.write
>> ("===============================================================\n")
>> for x in remove_count:
>> msg = x + ": Removed " + str(remove_count[x]) + " times."
>> - print msg
>> + print(msg)
>> logfile.write (msg + "\n")
>> ilog.write (msg + "\n")
>> diff --git a/contrib/header-tools/replace-header
>> b/contrib/header-tools/replace-header
>> index ce20096a453..fd07d2c88e5 100755
>> --- a/contrib/header-tools/replace-header
>> +++ b/contrib/header-tools/replace-header
>> @@ -1,4 +1,4 @@
>> -#! /usr/bin/python2
>> +#! /usr/bin/python3
>> import os.path
>> import sys
>> import shlex
>> @@ -20,7 +20,7 @@ for x in sys.argv[1:]:
>> elif x[0:2] == "-r":
>> replace.append (x[2:])
>> elif x[0:1] == "-":
>> - print "Error: unrecognized option " + x
>> + print("Error: unrecognized option " + x)
>> usage = True
>> else:
>> files.append (x)
>> @@ -29,25 +29,25 @@ if find == "":
>> usage = True
>> if usage:
>> - print "replace-header -fheader -rheader [-rheader] file1 [filen.]"
>> + print("replace-header -fheader -rheader [-rheader] file1 [filen.]")
>> sys.exit(0)
>> string = ""
>> for x in replace:
>> string = string + " '"+x+"'"
>> -print "Replacing '"+find+"' with"+string
>> +print("Replacing '"+find+"' with"+string)
>> for x in files:
>> src = readwholefile (x)
>> src = find_replace_include (find, replace, src)
>> if (len(src) > 0):
>> - print x + ": Changed"
>> + print(x + ": Changed")
>> out = open(x, "w")
>> for line in src:
>> out.write (line);
>> out.close ()
>> else:
>> - print x
>> + print(x)
>> diff --git a/contrib/header-tools/show-headers
>> b/contrib/header-tools/show-headers
>> index cb949ec1f44..86eceec71b6 100755
>> --- a/contrib/header-tools/show-headers
>> +++ b/contrib/header-tools/show-headers
>> @@ -1,4 +1,4 @@
>> -#! /usr/bin/python2
>> +#! /usr/bin/python3
>> import os.path
>> import sys
>> import shlex
>> @@ -93,17 +93,17 @@ for x in sys.argv[1:]:
>> if len(src) != 1:
>> usage = True
>> elif not os.path.exists (src[0]):
>> - print src[0] + ": Requested source file does not exist.\n"
>> + print(src[0] + ": Requested source file does not exist.\n")
>> usage = True
>> if usage:
>> - print "show-headers [-idir] [-sfilen] file1 "
>> - print " "
>> - print " Show a hierarchical visual format how many times each
>> header file"
>> - print " is included in a source file. Should be run from the
>> source directory"
>> - print " files from find-include-depends"
>> - print " -s : search for a header, and point it out."
>> - print " -i : Specifies additonal directories to search for
>> includes."
>> + print("show-headers [-idir] [-sfilen] file1 ")
>> + print(" ")
>> + print(" Show a hierarchical visual format how many times each
>> header file")
>> + print(" is included in a source file. Should be run from the
>> source directory")
>> + print(" files from find-include-depends")
>> + print(" -s : search for a header, and point it out.")
>> + print(" -i : Specifies additonal directories to search for
>> includes.")
>> sys.exit(0)
>> @@ -114,10 +114,10 @@ if extradir:
>> blddir = find_gcc_bld_dir ("../..")
>> if blddir:
>> - print "Using build directory: " + blddir
>> + print("Using build directory: " + blddir)
>> incl_dirs.insert (0, blddir)
>> else:
>> - print "Could not find a build directory, better results if you
>> specify one with -i"
>> + print("Could not find a build directory, better results if you
>> specify one with -i")
>> # search path is now ".", blddir, extradirs_from_-i,
>> built_in_incl_dirs
>> incl_dirs.insert (0, ".")
>> @@ -137,15 +137,15 @@ for line in data:
>> if d and d[-2:] == ".h":
>> process_include (d, 1)
>> -print "\n" + x
>> +print("\n" + x)
>> for line in output:
>> - print line
>> + print(line)
>> if highlight:
>> - print " "
>> + print(" ")
>> for h in summary:
>> - print h + " is included by source file."
>> + print(h + " is included by source file.")
>> for h in highlight:
>> if h not in summary:
>> - print h + " is not included by source file."
>> + print(h + " is not included by source file.")
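The full patch follows. The conversion is mechanical throughout: Python 2 print statements become print() function calls, ur"..." regular-expression literals drop the u prefix (Python 3 no longer accepts the combined ur prefix, and a plain raw string is equivalent there), and the renamed standard-library module is imported as queue instead of Queue. A small before/after sketch of the three patterns, using made-up values rather than lines taken from the scripts:

    import re
    import queue                        # Python 3 name of the Python 2 "Queue" module

    line = '#include "tree.h"'          # placeholder input for the example

    # Python 2: print "found", line
    print("found", line)

    # Python 2: re.findall (ur'^\s*#\s*include\s*"(.+?)"', line)
    print(re.findall(r'^\s*#\s*include\s*"(.+?)"', line))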
diff --git a/contrib/header-tools/count-headers b/contrib/header-tools/count-headers
--- a/contrib/header-tools/count-headers
+++ b/contrib/header-tools/count-headers
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -45,11 +45,11 @@ if not usage and len (src) > 0:
l.sort (key=lambda tup:tup[0], reverse=True)
for f in l:
- print str (f[0]) + " : " + f[1]
+ print(str (f[0]) + " : " + f[1])
else:
- print "count-headers file1 [filen]"
- print "Count the number of occurrences of all includes across all listed files"
+ print("count-headers file1 [filen]")
+ print("Count the number of occurrences of all includes across all listed files")
diff --git a/contrib/header-tools/gcc-order-headers b/contrib/header-tools/gcc-order-headers
--- a/contrib/header-tools/gcc-order-headers
+++ b/contrib/header-tools/gcc-order-headers
@@ -1,11 +1,11 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os
import sys
import shlex
import re
from headerutils import *
-import Queue
+import queue
file_list = list ()
usage = False
@@ -77,19 +77,19 @@ def create_master_list (fn, verbose):
if fn != "diagnostic.h" and fn != "diagnostic-core.h":
master_list.append (fn)
if (verbose):
- print fn + " included by: " + includes[fn][0]
+ print(fn + " included by: " + includes[fn][0])
def print_dups ():
if dups:
- print "\nduplicated includes"
+ print("\nduplicated includes")
for i in dups:
string = "dup : " + i + " : "
string += includes[i][0]
for i2 in dups[i]:
string += ", "+i2
- print string
+ print(string)
def process_known_dups ():
@@ -230,11 +230,11 @@ for arg in sys.argv[1:]:
elif arg[0:2] == "-v":
show_master = True
else:
- print "Error: unrecognized option " + arg
+ print("Error: unrecognized option " + arg)
elif os.path.exists(arg):
file_list.append (arg)
else:
- print "Error: file " + arg + " Does not exist."
+ print("Error: file " + arg + " Does not exist.")
usage = True
if not file_list and not show_master:
@@ -242,7 +242,7 @@ if not file_list and not show_master:
if not usage and not os.path.exists ("coretypes.h"):
usage = True
- print "Error: Must run command in main gcc source directory containing coretypes.h\n"
+ print("Error: Must run command in main gcc source directory containing coretypes.h\n")
# process diagnostic.h first.. it's special since GCC_DIAG_STYLE can be
# overridden by languages, but must be done so by a file included BEFORE it.
@@ -268,20 +268,20 @@ process_known_dups ()
desired_order = master_list
if show_master:
- print " Canonical order of gcc include files: "
+ print(" Canonical order of gcc include files: ")
for x in master_list:
- print x
- print " "
+ print(x)
+ print(" ")
if usage:
- print "gcc-order-headers [-i] [-v] file1 [filen]"
- print " Ensures gcc's headers files are included in a normalized form with"
- print " redundant headers removed. The original files are saved in filename.bak"
- print " Outputs a list of files which changed."
- print " -i ignore conditional compilation."
- print " Use after examining the file to be sure includes within #ifs are safe"
- print " Any headers within conditional sections will be ignored."
- print " -v Show the canonical order of known headers"
+ print("gcc-order-headers [-i] [-v] file1 [filen]")
+ print(" Ensures gcc's headers files are included in a normalized form with")
+ print(" redundant headers removed. The original files are saved in filename.bak")
+ print(" Outputs a list of files which changed.")
+ print(" -i ignore conditional compilation.")
+ print(" Use after examining the file to be sure includes within #ifs are safe")
+ print(" Any headers within conditional sections will be ignored.")
+ print(" -v Show the canonical order of known headers")
sys.exit(0)
@@ -303,7 +303,7 @@ for fn in file_list:
if ii_include_list_cond (iinfo):
if not ignore_conditional:
- print fn + ": Cannot process due to conditional compilation of includes"
+ print(fn + ": Cannot process due to conditional compilation of includes")
didnt_do.append (fn)
src = list ()
@@ -329,8 +329,8 @@ for fn in file_list:
src_line[nm] = ii_src_line(iinfo)[d]
if src_line[nm].find("/*") != -1 and src_line[nm].find("*/") == -1:
# this means we have a multi line comment, abort!'
- print fn + ": Cannot process due to a multi-line comment :"
- print " " + src_line[nm]
+ print(fn + ": Cannot process due to a multi-line comment :")
+ print(" " + src_line[nm])
if fn not in didnt_do:
didnt_do.append (fn)
src = list ()
@@ -375,22 +375,22 @@ for fn in file_list:
for line in new_src:
fl.write (line)
fl.close ()
- print fn
+ print(fn )
if didnt_do:
- print "\n\n Did not process the following files due to conditional dependencies:"
+ print("\n\n Did not process the following files due to conditional dependencies:")
str = ""
for x in didnt_do:
str += x + " "
- print str
- print "\n"
- print "Please examine to see if they are safe to process, and re-try with -i. "
- print "Safeness is determined by checking whether any of the reordered headers are"
- print "within a conditional and could be hauled out of the conditional, thus changing"
- print "what the compiler will see."
- print "Multi-line comments after a #include can also cause failuer, they must be turned"
- print "into single line comments or removed."
+ print(str)
+ print("\n")
+ print("Please examine to see if they are safe to process, and re-try with -i. ")
+ print("Safeness is determined by checking whether any of the reordered headers are")
+ print("within a conditional and could be hauled out of the conditional, thus changing")
+ print("what the compiler will see.")
+ print("Multi-line comments after a #include can also cause failuer, they must be turned")
+ print("into single line comments or removed.")
diff --git a/contrib/header-tools/graph-header-logs b/contrib/header-tools/graph-header-logs
--- a/contrib/header-tools/graph-header-logs
+++ b/contrib/header-tools/graph-header-logs
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -53,7 +53,7 @@ ignore = [ "coretypes_h",
def process_log_file (header, logfile):
if header_roots.get (header) != None:
- print "Error: already processed log file: " + header + ".log"
+ print("Error: already processed log file: " + header + ".log")
return
hname = pretty_name (header)
header_roots[hname] = { }
@@ -66,7 +66,7 @@ def process_log_file (header, logfile):
if newinc:
incfrom = list()
newinc = False
- fn = re.findall(ur".*/(.*?):", line)
+ fn = re.findall(r".*/(.*?):", line)
if len(fn) != 1:
continue
if fn[0][-2:] != ".h":
@@ -76,16 +76,16 @@ def process_log_file (header, logfile):
incfrom.append (n)
continue
newinc = True
- note = re.findall (ur"^.*note: (.*)", line)
+ note = re.findall (r"^.*note: (.*)", line)
if len(note) > 0:
sline.append (("note", note[0]))
else:
- err_msg = re.findall (ur"^.*: error: (.*)", line)
+ err_msg = re.findall (r"^.*: error: (.*)", line)
if len(err_msg) == 1:
msg = err_msg[0]
if (len (re.findall("error: forward declaration", line))) != 0:
continue
- path = re.findall (ur"^(.*?):.*error: ", line)
+ path = re.findall (r"^(.*?):.*error: ", line)
if len(path) != 1:
continue
if path[0][-2:] != ".h":
@@ -95,7 +95,7 @@ def process_log_file (header, logfile):
continue
sline.append (("error", msg, fname, incfrom))
- print str(len(sline)) + " lines to process"
+ print(str(len(sline)) + " lines to process")
lastline = "note"
for line in sline:
if line[0] != "note" and lastline[0] == "error":
@@ -111,7 +111,7 @@ def process_log_file (header, logfile):
if ee not in extra_edges:
extra_edges.append (ee)
fname = t
- print string
+ print(string)
if hname not in nodes:
nodes.append(hname)
@@ -125,7 +125,7 @@ def process_log_file (header, logfile):
if header_roots[hname].get(fname) == None:
header_roots[hname][fname] = list()
if msg not in header_roots[hname][fname]:
- print string + ofname + " : " +msg
+ print(string + ofname + " : " +msg)
header_roots[hname][fname].append (msg)
lastline = line;
@@ -181,10 +181,10 @@ def build_dot_file (file_list):
if verbose:
depcount.sort(key=lambda tup:tup[2])
for x in depcount:
- print " ("+str(x[2])+ ") : " + x[0] + " -> " + x[1]
+ print(" ("+str(x[2])+ ") : " + x[0] + " -> " + x[1])
if (x[2] <= verbosity):
for l in header_roots[x[0]][x[1]]:
- print " " + l
+ print(" " + l)
output.write ("}\n");
@@ -205,7 +205,7 @@ for arg in sys.argv[1:]:
if (verbosity == 9):
verbosity = 9999
elif arg[0:1] == "-":
- print "Unrecognized option " + arg
+ print("Unrecognized option " + arg)
dohelp = True
else:
files.append (arg)
@@ -214,16 +214,16 @@ if len(sys.argv) == 1:
dohelp = True
if dohelp:
- print "Parses the log files from the reduce-headers tool to generate"
- print "dependency graphs for the include web for specified files."
- print "Usage: [-nnum] [-h] [-v[n]] [-ooutput] file1 [[file2] ... [filen]]"
- print " -ooutput : Specifies output to output.dot and output.png"
- print " Defaults to 'graph.dot and graph.png"
- print " -vn : verbose mode, shows the number of connections, and if n"
- print " is specified, show the messages if # < n. 9 is infinity"
- print " -h : help"
+ print("Parses the log files from the reduce-headers tool to generate")
+ print("dependency graphs for the include web for specified files.")
+ print("Usage: [-nnum] [-h] [-v[n]] [-ooutput] file1 [[file2] ... [filen]]")
+ print(" -ooutput : Specifies output to output.dot and output.png")
+ print(" Defaults to 'graph.dot and graph.png")
+ print(" -vn : verbose mode, shows the number of connections, and if n")
+ print(" is specified, show the messages if # < n. 9 is infinity")
+ print(" -h : help")
else:
- print files
+ print(files)
build_dot_file (files)
os.system ("dot -Tpng " + dotname + " -o" + graphname)
diff --git a/contrib/header-tools/graph-include-web b/contrib/header-tools/graph-include-web
--- a/contrib/header-tools/graph-include-web
+++ b/contrib/header-tools/graph-include-web
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -82,7 +82,7 @@ for arg in sys.argv[1:]:
noterm = True
elif arg[0:2] == "-f":
if not os.path.exists (arg[2:]):
- print "Option " + arg +" doesn't specify a proper file"
+ print("Option " + arg +" doesn't specify a proper file")
dohelp = True
else:
sfile = open (arg[2:], "r")
@@ -93,7 +93,7 @@ for arg in sys.argv[1:]:
elif arg[0:2] == "-n":
edge_thresh = int (arg[2:])
elif arg[0:1] == "-":
- print "Unrecognized option " + arg
+ print("Unrecognized option " + arg)
dohelp = True
else:
files.append (arg)
@@ -102,17 +102,17 @@ if len(sys.argv) == 1:
dohelp = True
if dohelp:
- print "Generates a graph of the include web for specified files."
- print "Usage: [-finput_file] [-h] [-ooutput] [file1 ... [filen]]"
- print " -finput_file : Input file containing a list of files to process."
- print " -ooutput : Specifies output to output.dot and output.png."
- print " defaults to graph.dot and graph.png."
- print " -nnum : Specifies the # of edges beyond which sfdp is invoked. def=0."
- print " -a : Aggregate all .c files to 1 file. Shows only include web."
- print " -at : Aggregate, but don't include terminal.h to .c links."
- print " -h : Print this help."
+ print("Generates a graph of the include web for specified files.")
+ print("Usage: [-finput_file] [-h] [-ooutput] [file1 ... [filen]]")
+ print(" -finput_file : Input file containing a list of files to process.")
+ print(" -ooutput : Specifies output to output.dot and output.png.")
+ print(" defaults to graph.dot and graph.png.")
+ print(" -nnum : Specifies the # of edges beyond which sfdp is invoked. def=0.")
+ print(" -a : Aggregate all .c files to 1 file. Shows only include web.")
+ print(" -at : Aggregate, but don't include terminal.h to .c links.")
+ print(" -h : Print this help.")
else:
- print files
+ print(files)
build_dot_file (files)
if edges > edge_thresh:
os.system ("sfdp -Tpng " + dotname + " -o" + graphname)
diff --git a/contrib/header-tools/headerutils.py b/contrib/header-tools/headerutils.py
--- a/contrib/header-tools/headerutils.py
+++ b/contrib/header-tools/headerutils.py
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -10,7 +10,7 @@ import pickle
import multiprocessing
def find_pound_include (line, use_outside, use_slash):
- inc = re.findall (ur"^\s*#\s*include\s*\"(.+?)\"", line)
+ inc = re.findall (r"^\s*#\s*include\s*\"(.+?)\"", line)
if len(inc) == 1:
nm = inc[0]
if use_outside or os.path.exists (nm):
@@ -19,17 +19,17 @@ def find_pound_include (line, use_outside, use_slash):
return ""
def find_system_include (line):
- inc = re.findall (ur"^\s*#\s*include\s*<(.+?)>", line)
+ inc = re.findall (r"^\s*#\s*include\s*<(.+?)>", line)
if len(inc) == 1:
return inc[0]
return ""
def find_pound_define (line):
- inc = re.findall (ur"^\s*#\s*define ([A-Za-z0-9_]+)", line)
+ inc = re.findall (r"^\s*#\s*define ([A-Za-z0-9_]+)", line)
if len(inc) != 0:
if len(inc) > 1:
- print "What? more than 1 match in #define??"
- print inc
+ print("What? more than 1 match in #define??")
+ print(inc)
sys.exit(5)
return inc[0];
return ""
@@ -49,26 +49,26 @@ def is_pound_endif (line):
return False
def find_pound_if (line):
- inc = re.findall (ur"^\s*#\s*if\s+(.*)", line)
+ inc = re.findall (r"^\s*#\s*if\s+(.*)", line)
if len(inc) == 0:
- inc = re.findall (ur"^\s*#\s*elif\s+(.*)", line)
+ inc = re.findall (r"^\s*#\s*elif\s+(.*)", line)
if len(inc) > 0:
- inc2 = re.findall (ur"defined\s*\((.+?)\)", inc[0])
- inc3 = re.findall (ur"defined\s+([a-zA-Z0-9_]+)", inc[0])
+ inc2 = re.findall (r"defined\s*\((.+?)\)", inc[0])
+ inc3 = re.findall (r"defined\s+([a-zA-Z0-9_]+)", inc[0])
for yy in inc3:
inc2.append (yy)
return inc2
else:
- inc = re.findall (ur"^\s*#\s*ifdef\s(.*)", line)
+ inc = re.findall (r"^\s*#\s*ifdef\s(.*)", line)
if len(inc) == 0:
- inc = re.findall (ur"^\s*#\s*ifndef\s(.*)", line)
+ inc = re.findall (r"^\s*#\s*ifndef\s(.*)", line)
if len(inc) > 0:
inc2 = re.findall ("[A-Za-z_][A-Za-z_0-9]*", inc[0])
return inc2
if len(inc) == 0:
return list ()
- print "WTF. more than one line returned for find_pound_if"
- print inc
+ print("WTF. more than one line returned for find_pound_if")
+ print(inc)
sys.exit(5)
@@ -248,8 +248,8 @@ def find_gcc_bld_dir (path):
for y in files:
p = os.path.dirname (y)
if os.path.basename (p) == "gcc":
- blddir = p
- break
+ blddir = p
+ break
return blddir
@@ -424,7 +424,7 @@ def find_replace_include (find, replace, src):
# pass in a require and provide dictionary to be read in.
def read_require_provides (require, provide):
if not os.path.exists ("require-provide.master"):
- print "require-provide.master file is not available. please run data collection."
+ print("require-provide.master file is not available. please run data collection.")
sys.exit(1)
incl_list = open("require-provide.master").read().splitlines()
for f in incl_list:
@@ -501,7 +501,7 @@ def spawn_makes (command_list):
c = subprocess.Popen(command, bufsize=-1, stdout=devnull, stderr=subprocess.PIPE, shell=True)
proc_res.append ((c, tname))
- print text[:-2]
+ print(text[:-2])
for p in proc_res:
output = p[0].communicate()
diff --git a/contrib/header-tools/included-by b/contrib/header-tools/included-by
--- a/contrib/header-tools/included-by
+++ b/contrib/header-tools/included-by
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -43,7 +43,7 @@ for x in sys.argv[1:]:
file_list = open (x[2:]).read().splitlines()
scanfiles = False
elif x[0] == "-":
- print "Error: Unknown option " + x
+ print("Error: Unknown option " + x)
usage = True
else:
src.append (x)
@@ -89,21 +89,21 @@ if not usage:
if len (careabout) < num_match:
output = ""
if output != "":
- print output
+ print(output)
else:
- print "included-by [-h] [-i] [-c] [-v] [-a] [-nx] file1 [file2] ... [filen]"
- print "find the list of all files in subdirectories that include any of "
- print "the listed files. processed to a depth of 3 subdirs"
- print " -h : Show this message"
- print " -i : process only header files (*.h) for #include"
- print " -c : process only source files (*.c *.cc) for #include"
- print " If nothing is specified, defaults to -i -c"
- print " -s : Same as -c."
- print " -v : Show which include(s) were found"
- print " -nx : Only list files which have at least x different matches. Default = 1"
- print " -a : Show only files which all listed files are included"
- print " This is equivilent to -nT where T == # of items in list"
- print " -flistfile : Show only files contained in the list of files"
+ print("included-by [-h] [-i] [-c] [-v] [-a] [-nx] file1 [file2] ... [filen]")
+ print("find the list of all files in subdirectories that include any of ")
+ print("the listed files. processed to a depth of 3 subdirs")
+ print(" -h : Show this message")
+ print(" -i : process only header files (*.h) for #include")
+ print(" -c : process only source files (*.c *.cc) for #include")
+ print(" If nothing is specified, defaults to -i -c")
+ print(" -s : Same as -c.")
+ print(" -v : Show which include(s) were found")
+ print(" -nx : Only list files which have at least x different matches. Default = 1")
+ print(" -a : Show only files which all listed files are included")
+ print(" This is equivilent to -nT where T == # of items in list")
+ print(" -flistfile : Show only files contained in the list of files")
diff --git a/contrib/header-tools/reduce-headers b/contrib/header-tools/reduce-headers
index 7d236e30688..e1c809ad904 100755
--- a/contrib/header-tools/reduce-headers
+++ b/contrib/header-tools/reduce-headers
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -177,7 +177,7 @@ def build_target_dict (bld_dir, just_these):
target = t.strip()
tpath = bld_dir + "/" + target
if not os.path.exists (tpath + "/gcc"):
- print "Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc"
+ print("Error: gcc build directory for target " + t + " Does not exist: " + tpath + "/gcc")
error = True
else:
target_dict[target] = tpath
@@ -209,7 +209,7 @@ def find_targets (src_file):
targ_list = list()
obj_name = get_obj_name (src_file)
if not obj_name:
- print "Error: " + src_file + " - Cannot determine object name."
+ print("Error: " + src_file + " - Cannot determine object name.")
return list()
# Put the high priority targets which tend to trigger failures first
@@ -263,22 +263,22 @@ def try_to_remove (src_file, h_list, verbose):
hostbuild = True
if not target_dict:
summary = src_file + ": Target builds are required for config files. None found."
- print summary
+ print(summary)
return summary
if not targ_list:
summary =src_file + ": Cannot find any targets which build this file."
- print summary
+ print(summary)
return summary
if hostbuild:
# confirm it actually builds before we do anything
- print "Confirming source file builds"
+ print("Confirming source file builds")
res = get_make_output (build_dir + "/gcc", "all")
if res[0] != 0:
message = "Error: " + src_file + " does not build currently."
summary = src_file + " does not build on host."
- print message
- print res[1]
+ print(message)
+ print(res[1])
if verbose:
verbose.write (message + "\n")
verbose.write (res[1]+ "\n")
@@ -314,7 +314,7 @@ def try_to_remove (src_file, h_list, verbose):
lookfor = ii_src_line(src_info)[inc_file]
src_tmp.remove (lookfor)
message = "Trying " + src_file + " without " + inc_file
- print message
+ print(message)
if verbose:
verbose.write (message + "\n")
out = open(src_file, "w")
@@ -350,7 +350,7 @@ def try_to_remove (src_file, h_list, verbose):
message = "Passed host and target builds"
if keep:
- print message + "\n"
+ print(message + "\n")
if (rc != 0):
if verbose:
@@ -392,7 +392,7 @@ def try_to_remove (src_file, h_list, verbose):
if ii_path(iinfo) == "build" and not target_dict:
keep = True
text = message + " : Will not remove a build file without some targets."
- print text
+ print(text)
ilog = open(src_file+".log","a")
ilog.write (text +"\n")
ilog.write ("============================================\n");
@@ -414,7 +414,7 @@ def try_to_remove (src_file, h_list, verbose):
text = message + ", but must keep " + inc_file + " because it provides " + dep
if because.get(dep) != None:
text = text + " Possibly required by " + because[dep]
- print text
+ print(text)
ilog = open(inc_file+".log","a")
ilog.write (because[dep]+": Requires [dep] in "+src_file+"\n")
ilog.write ("============================================\n");
@@ -444,7 +444,7 @@ def try_to_remove (src_file, h_list, verbose):
src_tmp = copy.deepcopy (src_data)
else:
- print message + " --> removing " + inc_file + "\n"
+ print(message + " --> removing " + inc_file + "\n")
rmcount += 1
if verbose:
verbose.write (message + " --> removing " + inc_file + "\n")
@@ -454,7 +454,7 @@ def try_to_remove (src_file, h_list, verbose):
remove_count[inc_file] += 1
src_data = copy.deepcopy (src_tmp)
except:
- print "Interuption: restoring original file"
+ print("Interuption: restoring original file")
out = open(src_file, "w")
for line in src_orig:
out.write (line)
@@ -473,8 +473,8 @@ def try_to_remove (src_file, h_list, verbose):
rc = res[0]
if (rc != 0):
# host build failed! return to original version
- print "Error: " + src_file + " Failed to bootstrap at end!!! restoring."
- print " Bad version at " + src_file + ".bad"
+ print("Error: " + src_file + " Failed to bootstrap at end!!! restoring.")
+ print(" Bad version at " + src_file + ".bad")
os.rename (src_file, src_file + ".bad")
out = open(src_file, "w")
for line in src_orig:
@@ -486,7 +486,7 @@ def try_to_remove (src_file, h_list, verbose):
summary = src_file + ": No change."
else:
summary = src_file + ": Reduction performed, "+str(rmcount)+" includes removed."
- print summary
+ print(summary)
return summary
only_h = list ()
@@ -513,11 +513,11 @@ for x in sys.argv[1:]:
elif x[0:2] == "-t":
target_dir = x[2:]
elif x[0] == "-":
- print "Error: Unrecognized option " + x
+ print("Error: Unrecognized option " + x)
usgae = True
else:
if not os.path.exists (x):
- print "Error: specified file " + x + " does not exist."
+ print("Error: specified file " + x + " does not exist.")
usage = True
else:
src.append (x)
@@ -526,50 +526,50 @@ if target_dir:
build_target_dict (target_dir, only_targs)
if build_dir == "" and target_dir == "":
- print "Error: Must specify a build directory, and/or a target directory."
+ print("Error: Must specify a build directory, and/or a target directory.")
usage = True
if build_dir and not os.path.exists (build_dir):
- print "Error: specified build directory does not exist : " + build_dir
+ print("Error: specified build directory does not exist : " + build_dir)
usage = True
if target_dir and not os.path.exists (target_dir):
- print "Error: specified target directory does not exist : " + target_dir
+ print("Error: specified target directory does not exist : " + target_dir)
usage = True
if usage:
- print "Attempts to remove extraneous include files from source files."
- print " "
- print "Should be run from the main gcc source directory, and works on a target"
- print "directory, as we attempt to make the 'all' target."
- print " "
- print "By default, gcc-reorder-includes is run on each file before attempting"
- print "to remove includes. this removes duplicates and puts some headers in a"
- print "canonical ordering"
- print " "
- print "The build directory should be ready to compile via make. Time is saved"
- print "if the build is already complete, so that only changes need to be built."
- print " "
- print "Usage: [options] file1.c [file2.c] ... [filen.c]"
- print " -bdir : the root build directory to attempt buiding .o files."
- print " -tdir : the target build directory"
- print " -d : Ignore conditional macro dependencies."
- print " "
- print " -Dmacro : Ignore a specific macro for dependencies"
- print " -Ttarget : Only consider target in target directory."
- print " -fheader : Specifies a specific .h file to be considered."
- print " "
- print " -D, -T, and -f can be specified mulitple times and are aggregated."
- print " "
- print " The original file will be in filen.bak"
- print " "
+ print("Attempts to remove extraneous include files from source files.")
+ print(" ")
+ print("Should be run from the main gcc source directory, and works on a target")
+ print("directory, as we attempt to make the 'all' target.")
+ print(" ")
+ print("By default, gcc-reorder-includes is run on each file before attempting")
+ print("to remove includes. this removes duplicates and puts some headers in a")
+ print("canonical ordering")
+ print(" ")
+ print("The build directory should be ready to compile via make. Time is saved")
+ print("if the build is already complete, so that only changes need to be built.")
+ print(" ")
+ print("Usage: [options] file1.c [file2.c] ... [filen.c]")
+ print(" -bdir : the root build directory to attempt buiding .o files.")
+ print(" -tdir : the target build directory")
+ print(" -d : Ignore conditional macro dependencies.")
+ print(" ")
+ print(" -Dmacro : Ignore a specific macro for dependencies")
+ print(" -Ttarget : Only consider target in target directory.")
+ print(" -fheader : Specifies a specific .h file to be considered.")
+ print(" ")
+ print(" -D, -T, and -f can be specified mulitple times and are aggregated.")
+ print(" ")
+ print(" The original file will be in filen.bak")
+ print(" ")
sys.exit (0)
if only_h:
- print "Attempting to remove only these files:"
+ print("Attempting to remove only these files:")
for x in only_h:
- print x
- print " "
+ print(x)
+ print(" ")
logfile = open("reduce-headers.log","w")
@@ -583,7 +583,7 @@ ilog = open("reduce-headers.sum","a")
ilog.write ("===============================================================\n")
for x in remove_count:
msg = x + ": Removed " + str(remove_count[x]) + " times."
- print msg
+ print(msg)
logfile.write (msg + "\n")
ilog.write (msg + "\n")
diff --git a/contrib/header-tools/replace-header b/contrib/header-tools/replace-header
index ce20096a453..fd07d2c88e5 100755
--- a/contrib/header-tools/replace-header
+++ b/contrib/header-tools/replace-header
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -20,7 +20,7 @@ for x in sys.argv[1:]:
elif x[0:2] == "-r":
replace.append (x[2:])
elif x[0:1] == "-":
- print "Error: unrecognized option " + x
+ print("Error: unrecognized option " + x)
usage = True
else:
files.append (x)
@@ -29,25 +29,25 @@ if find == "":
usage = True
if usage:
- print "replace-header -fheader -rheader [-rheader] file1 [filen.]"
+ print("replace-header -fheader -rheader [-rheader] file1 [filen.]")
sys.exit(0)
string = ""
for x in replace:
string = string + " '"+x+"'"
-print "Replacing '"+find+"' with"+string
+print("Replacing '"+find+"' with"+string)
for x in files:
src = readwholefile (x)
src = find_replace_include (find, replace, src)
if (len(src) > 0):
- print x + ": Changed"
+ print(x + ": Changed")
out = open(x, "w")
for line in src:
out.write (line);
out.close ()
else:
- print x
+ print(x)
diff --git a/contrib/header-tools/show-headers b/contrib/header-tools/show-headers
index cb949ec1f44..86eceec71b6 100755
--- a/contrib/header-tools/show-headers
+++ b/contrib/header-tools/show-headers
@@ -1,4 +1,4 @@
-#! /usr/bin/python2
+#! /usr/bin/python3
import os.path
import sys
import shlex
@@ -93,17 +93,17 @@ for x in sys.argv[1:]:
if len(src) != 1:
usage = True
elif not os.path.exists (src[0]):
- print src[0] + ": Requested source file does not exist.\n"
+ print(src[0] + ": Requested source file does not exist.\n")
usage = True
if usage:
- print "show-headers [-idir] [-sfilen] file1 "
- print " "
- print " Show a hierarchical visual format how many times each header file"
- print " is included in a source file. Should be run from the source directory"
- print " files from find-include-depends"
- print " -s : search for a header, and point it out."
- print " -i : Specifies additonal directories to search for includes."
+ print("show-headers [-idir] [-sfilen] file1 ")
+ print(" ")
+ print(" Show a hierarchical visual format how many times each header file")
+ print(" is included in a source file. Should be run from the source directory")
+ print(" files from find-include-depends")
+ print(" -s : search for a header, and point it out.")
+ print(" -i : Specifies additonal directories to search for includes.")
sys.exit(0)
@@ -114,10 +114,10 @@ if extradir:
blddir = find_gcc_bld_dir ("../..")
if blddir:
- print "Using build directory: " + blddir
+ print("Using build directory: " + blddir)
incl_dirs.insert (0, blddir)
else:
- print "Could not find a build directory, better results if you specify one with -i"
+ print("Could not find a build directory, better results if you specify one with -i")
# search path is now ".", blddir, extradirs_from_-i, built_in_incl_dirs
incl_dirs.insert (0, ".")
@@ -137,15 +137,15 @@ for line in data:
if d and d[-2:] == ".h":
process_include (d, 1)
-print "\n" + x
+print("\n" + x)
for line in output:
- print line
+ print(line)
if highlight:
- print " "
+ print(" ")
for h in summary:
- print h + " is included by source file."
+ print(h + " is included by source file.")
for h in highlight:
if h not in summary:
- print h + " is not included by source file."
+ print(h + " is not included by source file.")
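Since these contrib scripts have no test suite, a quick local sanity check (a sketch only, not part of the patch) is to confirm that each converted script still parses as Python 3 source. The loop below assumes it is run from the top of a gcc checkout and skips anything in the directory that is not a script (such as the README):

    import glob

    for path in sorted(glob.glob("contrib/header-tools/*")):
        with open(path, "rb") as f:
            source = f.read()
        # Only look at files that start with a python shebang.
        if not source.startswith(b"#!") or b"python" not in source.splitlines()[0]:
            continue
        try:
            compile(source, path, "exec")   # parse only; nothing is executed
            print(path + ": parses as Python 3")
        except SyntaxError as err:
            print(path + ": FAILED - " + str(err))

A parse check like this catches the print-statement and ur-prefix breakage directly; the Queue-to-queue rename only shows up at import time, when gcc-order-headers is actually run.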