Diffstat (limited to 'Tools/build/check_warnings.py')
-rw-r--r--  Tools/build/check_warnings.py  146
1 file changed, 61 insertions(+), 85 deletions(-)
diff --git a/Tools/build/check_warnings.py b/Tools/build/check_warnings.py
index 1ed8344..a9d0c1e 100644
--- a/Tools/build/check_warnings.py
+++ b/Tools/build/check_warnings.py
@@ -1,38 +1,49 @@
"""
-Parses compiler output with -fdiagnostics-format=json and checks that warnings
+Parses compiler output from Clang or GCC and checks that warnings
exist only in files that are expected to have warnings.
"""
import argparse
from collections import defaultdict
-import json
import re
import sys
from pathlib import Path
from typing import NamedTuple
+
class FileWarnings(NamedTuple):
name: str
count: int
-def extract_warnings_from_compiler_output_clang(
+def extract_warnings_from_compiler_output(
compiler_output: str,
+ compiler_output_type: str,
+ path_prefix: str = "",
) -> list[dict]:
"""
- Extracts warnings from the compiler output when using clang
+ Extracts warnings from the compiler output based on compiler
+ output type. Removes path prefix from file paths if provided.
+ Compatible with GCC and Clang compiler output.
"""
- # Regex to find warnings in the compiler output
- clang_warning_regex = re.compile(
- r"(?P<file>.*):(?P<line>\d+):(?P<column>\d+): warning: "
- r"(?P<message>.*) (?P<option>\[-[^\]]+\])$"
- )
+ # Choose pattern and compile regex for particular compiler output
+ if compiler_output_type == "gcc":
+ regex_pattern = (
+ r"(?P<file>.*):(?P<line>\d+):(?P<column>\d+): warning: "
+ r"(?P<message>.*?)(?: (?P<option>\[-[^\]]+\]))?$"
+ )
+ elif compiler_output_type == "clang":
+ regex_pattern = (
+ r"(?P<file>.*):(?P<line>\d+):(?P<column>\d+): warning: "
+ r"(?P<message>.*) (?P<option>\[-[^\]]+\])$"
+ )
+ compiled_regex = re.compile(regex_pattern)
compiler_warnings = []
for line in compiler_output.splitlines():
- if match := clang_warning_regex.match(line):
+ if match := compiled_regex.match(line):
compiler_warnings.append(
{
- "file": match.group("file"),
+ "file": match.group("file").removeprefix(path_prefix),
"line": match.group("line"),
"column": match.group("column"),
"message": match.group("message"),
@@ -43,63 +54,11 @@ def extract_warnings_from_compiler_output_clang(
return compiler_warnings
-def extract_warnings_from_compiler_output_json(
- compiler_output: str,
-) -> list[dict]:
- """
- Extracts warnings from the compiler output when using
- -fdiagnostics-format=json.
-
- Compiler output as a whole is not a valid json document,
- but includes many json objects and may include other output
- that is not json.
- """
- # Regex to find json arrays at the top level of the file
- # in the compiler output
- json_arrays = re.findall(r"\[(?:[^[\]]|\[[^]]*])*]", compiler_output)
- compiler_warnings = []
- for array in json_arrays:
- try:
- json_data = json.loads(array)
- json_objects_in_array = [entry for entry in json_data]
- warning_list = [
- entry
- for entry in json_objects_in_array
- if entry.get("kind") == "warning"
- ]
- for warning in warning_list:
- locations = warning["locations"]
- for location in locations:
- for key in ["caret", "start", "end"]:
- if key in location:
- compiler_warnings.append(
- {
- # Remove leading current directory if present
- "file": location[key]["file"].lstrip("./"),
- "line": location[key]["line"],
- "column": location[key]["column"],
- "message": warning["message"],
- "option": warning["option"],
- }
- )
- # Found a caret, start, or end in location so
- # break out completely to address next warning
- break
- else:
- continue
- break
-
- except json.JSONDecodeError:
- continue # Skip malformed JSON
-
- return compiler_warnings
-
-
def get_warnings_by_file(warnings: list[dict]) -> dict[str, list[dict]]:
"""
- Returns a dictionary where the key is the file and the data is the warnings
- in that file. Does not include duplicate warnings for a file from list of
- provided warnings.
+ Returns a dictionary where the key is the file and the data is the
+ warnings in that file. Does not include duplicate warnings for a
+ file from list of provided warnings.
"""
warnings_by_file = defaultdict(list)
warnings_added = set()
@@ -124,22 +83,28 @@ def get_unexpected_warnings(
are associated with a file that is not found in the list of files
with expected warnings
"""
- unexpected_warnings = []
+ unexpected_warnings = {}
for file in files_with_warnings.keys():
found_file_in_ignore_list = False
for ignore_file in files_with_expected_warnings:
if file == ignore_file.name:
if len(files_with_warnings[file]) > ignore_file.count:
- unexpected_warnings.extend(files_with_warnings[file])
+ unexpected_warnings[file] = (files_with_warnings[file], ignore_file.count)
found_file_in_ignore_list = True
break
if not found_file_in_ignore_list:
- unexpected_warnings.extend(files_with_warnings[file])
+ unexpected_warnings[file] = (files_with_warnings[file], 0)
if unexpected_warnings:
print("Unexpected warnings:")
- for warning in unexpected_warnings:
- print(warning)
+ for file in unexpected_warnings:
+ print(
+ f"{file} expected {unexpected_warnings[file][1]} warnings,"
+ f" found {len(unexpected_warnings[file][0])}"
+ )
+ for warning in unexpected_warnings[file][0]:
+ print(warning)
+
return 1
return 0
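With the dict-based bookkeeping above, the report is grouped per file: a summary line followed by each warning dict. Roughly, for a file expected to be warning-free that produced two warnings (file name and warning dicts abbreviated for illustration):

    Unexpected warnings:
    Objects/example.c expected 0 warnings, found 2
    {'file': 'Objects/example.c', 'line': '12', ...}
    {'file': 'Objects/example.c', 'line': '87', ...}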
@@ -156,14 +121,14 @@ def get_unexpected_improvements(
unexpected_improvements = []
for file in files_with_expected_warnings:
if file.name not in files_with_warnings.keys():
- unexpected_improvements.append(file)
+ unexpected_improvements.append((file.name, file.count, 0))
elif len(files_with_warnings[file.name]) < file.count:
- unexpected_improvements.append(file)
+ unexpected_improvements.append((file.name, file.count, len(files_with_warnings[file.name])))
if unexpected_improvements:
print("Unexpected improvements:")
for file in unexpected_improvements:
- print(file.name)
+ print(f"{file[0]} expected {file[1]} warnings, found {file[2]}")
return 1
return 0
@@ -204,8 +169,15 @@ def main(argv: list[str] | None = None) -> int:
"--compiler-output-type",
type=str,
required=True,
- choices=["json", "clang"],
- help="Type of compiler output file (json or clang)",
+ choices=["gcc", "clang"],
+ help="Type of compiler output file (GCC or Clang)",
+ )
+ parser.add_argument(
+ "-p",
+ "--path-prefix",
+ type=str,
+ help="Path prefix to remove from the start of file paths"
+ " in compiler output",
)
args = parser.parse_args(argv)
@@ -241,7 +213,9 @@ def main(argv: list[str] | None = None) -> int:
# where the first element is the file name and the second element
# is the number of warnings expected in that file
files_with_expected_warnings = {
- FileWarnings(file.strip().split()[0], int(file.strip().split()[1]))
+ FileWarnings(
+ file.strip().split()[0], int(file.strip().split()[1])
+ )
for file in clean_files
if file.strip() and not file.startswith("#")
}
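For reference, the comprehension above expects each meaningful line of the warning-ignore file to hold a file name followed by its expected warning count; blank lines and lines starting with "#" are skipped. A hypothetical excerpt:

    # file path                 expected warning count
    Modules/example_module.c 2
    Objects/example_object.c 1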
@@ -249,14 +223,11 @@ def main(argv: list[str] | None = None) -> int:
with Path(args.compiler_output_file_path).open(encoding="UTF-8") as f:
compiler_output_file_contents = f.read()
- if args.compiler_output_type == "json":
- warnings = extract_warnings_from_compiler_output_json(
- compiler_output_file_contents
- )
- elif args.compiler_output_type == "clang":
- warnings = extract_warnings_from_compiler_output_clang(
- compiler_output_file_contents
- )
+ warnings = extract_warnings_from_compiler_output(
+ compiler_output_file_contents,
+ args.compiler_output_type,
+ args.path_prefix
+ )
files_with_warnings = get_warnings_by_file(warnings)
@@ -272,6 +243,11 @@ def main(argv: list[str] | None = None) -> int:
if args.fail_on_improvement:
exit_code |= status
+ print(
+ "For information about this tool and its configuration"
+ " visit https://devguide.python.org/development-tools/warnings/"
+ )
+
return exit_code
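With these changes, a typical run points the script at a saved compiler log and names the compiler that produced it; --path-prefix strips a build-directory prefix so the reported paths match the ignore file. A sketch of an invocation (flag names other than --compiler-output-type and --path-prefix do not appear in this diff and are assumed from the rest of the script):

    python Tools/build/check_warnings.py \
        --compiler-output-file-path=compiler_output.txt \
        --compiler-output-type=gcc \
        --path-prefix="$PWD/"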