Add documentation, fix identifier parsing

- Add documentation to all classes and functions that were
  not self-explanatory.
- Fix the parsing of identifiers, so it now behaves identically
  to the original shell script. It detects the same number of identifiers.
- Fix macro parsing so that MBEDTLS_PSA_ACCEL does not error out
- Reformat output to be comprehensible

Signed-off-by: Yuto Takano <yuto.takano@arm.com>
This commit is contained in:
Yuto Takano 2021-08-06 16:22:06 +01:00
parent 6f38ab3bca
commit 81528c058a

View file

@ -17,7 +17,14 @@
""" """
This script confirms that the naming of all symbols and identifiers in Mbed TLS This script confirms that the naming of all symbols and identifiers in Mbed TLS
are consistent with the house style and are also self-consistent. are consistent with the house style and are also self-consistent. It performs
the following checks:
- All exported and available symbols in the library object files, are explicitly
declared in the header files.
- All macros, constants, and identifiers (function names, struct names, etc)
follow the required pattern.
- Typo checking: All words that begin with MBED exist as macros or constants.
""" """
import argparse import argparse
@ -30,27 +37,50 @@ import shutil
import subprocess import subprocess
import logging import logging
# Naming patterns to check against # Naming patterns to check against. These are defined outside the NameCheck
# class for ease of modification.
MACRO_PATTERN = r"^(MBEDTLS|PSA)_[0-9A-Z_]*[0-9A-Z]$" MACRO_PATTERN = r"^(MBEDTLS|PSA)_[0-9A-Z_]*[0-9A-Z]$"
CONSTANTS_PATTERN = MACRO_PATTERN
IDENTIFIER_PATTERN = r"^(mbedtls|psa)_[0-9a-z_]*[0-9a-z]$" IDENTIFIER_PATTERN = r"^(mbedtls|psa)_[0-9a-z_]*[0-9a-z]$"
class Match(object): class Match(object):
"""
A class representing a match, together with its found position.
Fields:
* filename: the file that the match was in.
* line: the full line containing the match.
* pos: a tuple of (start, end) positions on the line where the match is.
* name: the match itself.
"""
def __init__(self, filename, line, pos, name): def __init__(self, filename, line, pos, name):
self.filename = filename self.filename = filename
self.line = line self.line = line
self.pos = pos self.pos = pos
self.name = name self.name = name
def __str__(self):
return self.name
class Problem(object): class Problem(object):
"""
A parent class representing a form of static analysis error.
Fields:
* textwrapper: a TextWrapper instance to format problems nicely.
"""
def __init__(self): def __init__(self):
self.textwrapper = textwrap.TextWrapper() self.textwrapper = textwrap.TextWrapper()
self.textwrapper.initial_indent = " * " self.textwrapper.width = 80
self.textwrapper.subsequent_indent = " " self.textwrapper.initial_indent = " * "
self.textwrapper.subsequent_indent = " "
class SymbolNotInHeader(Problem): class SymbolNotInHeader(Problem):
"""
A problem that occurs when an exported/available symbol in the object file
is not explicitly declared in header files. Created with
NameCheck.check_symbols_declared_in_header()
Fields:
* symbol_name: the name of the symbol.
"""
def __init__(self, symbol_name): def __init__(self, symbol_name):
self.symbol_name = symbol_name self.symbol_name = symbol_name
Problem.__init__(self) Problem.__init__(self)
@ -62,21 +92,36 @@ class SymbolNotInHeader(Problem):
.format(self.symbol_name)) .format(self.symbol_name))
class PatternMismatch(Problem): class PatternMismatch(Problem):
"""
A problem that occurs when something doesn't match the expected pattern.
Created with NameCheck.check_match_pattern()
Fields:
* pattern: the expected regex pattern
* match: the Match object in question
"""
def __init__(self, pattern, match): def __init__(self, pattern, match):
self.pattern = pattern self.pattern = pattern
self.match = match self.match = match
Problem.__init__(self) Problem.__init__(self)
def __str__(self): def __str__(self):
return self.textwrapper.fill( return self.textwrapper.fill(
"{0}: '{1}' does not match the required pattern '{2}'." "{0}: '{1}' does not match the required pattern '{2}'."
.format(self.match.filename, self.match.name, self.pattern)) .format(self.match.filename, self.match.name, self.pattern))
class Typo(Problem): class Typo(Problem):
"""
A problem that occurs when a word using MBED doesn't appear to be defined as
constants nor enum values. Created with NameCheck.check_for_typos()
Fields:
* match: the Match object of the MBED name in question.
"""
def __init__(self, match): def __init__(self, match):
self.match = match self.match = match
Problem.__init__(self) Problem.__init__(self)
def __str__(self): def __str__(self):
return self.textwrapper.fill( return self.textwrapper.fill(
"{0}: '{1}' looks like a typo. It was not found in any macros or " "{0}: '{1}' looks like a typo. It was not found in any macros or "
@ -84,11 +129,15 @@ class Typo(Problem):
.format(self.match.filename, self.match.name)) .format(self.match.filename, self.match.name))
class NameCheck(object): class NameCheck(object):
"""
Representation of the core name checking operation performed by this script.
Shares a common logger, common excluded filenames, and a shared return_code.
"""
def __init__(self): def __init__(self):
self.log = None self.log = None
self.check_repo_path() self.check_repo_path()
self.return_code = 0 self.return_code = 0
self.excluded_files = ["bn_mul"] self.excluded_files = ["bn_mul", "compat-2.x.h"]
def set_return_code(self, return_code): def set_return_code(self, return_code):
if return_code > self.return_code: if return_code > self.return_code:
@ -97,7 +146,7 @@ class NameCheck(object):
def setup_logger(self, verbose=False): def setup_logger(self, verbose=False):
""" """
Set up a logger and set the change the default logging level from Set up a logger and set the change the default logging level from
WARNING to INFO. Loggers are better than print statements since their WARNING to INFO. Loggers are better than print statements since their
verbosity can be controlled. verbosity can be controlled.
""" """
self.log = logging.getLogger() self.log = logging.getLogger()
@ -119,6 +168,16 @@ class NameCheck(object):
raise Exception("Must be run from Mbed TLS root") raise Exception("Must be run from Mbed TLS root")
def get_files(self, extension, directory): def get_files(self, extension, directory):
"""
Get all files that end with .extension in the specified directory
recursively.
Args:
* extension: the file extension to search for, without the dot
* directory: the directory to recursively search for
Returns a List of relative filepaths.
"""
filenames = [] filenames = []
for root, dirs, files in sorted(os.walk(directory)): for root, dirs, files in sorted(os.walk(directory)):
for filename in sorted(files): for filename in sorted(files):
@ -127,15 +186,65 @@ class NameCheck(object):
filenames.append(os.path.join(root, filename)) filenames.append(os.path.join(root, filename))
return filenames return filenames
def parse_names_in_source(self):
"""
Calls each parsing function to retrieve various elements of the code,
together with their source location. Puts the parsed values in the
internal variable self.parse_result.
"""
self.log.info("Parsing source code...")
m_headers = self.get_files("h", os.path.join("include", "mbedtls"))
p_headers = self.get_files("h", os.path.join("include", "psa"))
t_headers = ["3rdparty/everest/include/everest/everest.h",
"3rdparty/everest/include/everest/x25519.h"]
d_headers = self.get_files("h", os.path.join("tests", "include", "test", "drivers"))
l_headers = self.get_files("h", "library")
libraries = self.get_files("c", "library") + [
"3rdparty/everest/library/everest.c",
"3rdparty/everest/library/x25519.c"]
all_macros = self.parse_macros(
m_headers + p_headers + t_headers + l_headers + d_headers)
enum_consts = self.parse_enum_consts(
m_headers + l_headers + t_headers)
identifiers = self.parse_identifiers(
m_headers + p_headers + t_headers + l_headers)
mbed_names = self.parse_MBED_names(
m_headers + p_headers + t_headers + l_headers + libraries)
symbols = self.parse_symbols()
# Remove identifier macros like mbedtls_printf or mbedtls_calloc
identifiers_justname = [x.name for x in identifiers]
actual_macros = []
for macro in all_macros:
if macro.name not in identifiers_justname:
actual_macros.append(macro)
self.log.debug("Found:")
self.log.debug(" {} Macros".format(len(all_macros)))
self.log.debug(" {} Non-identifier Macros".format(len(actual_macros)))
self.log.debug(" {} Enum Constants".format(len(enum_consts)))
self.log.debug(" {} Identifiers".format(len(identifiers)))
self.log.debug(" {} Exported Symbols".format(len(symbols)))
self.log.info("Analysing...")
self.parse_result = {
"macros": actual_macros,
"enum_consts": enum_consts,
"identifiers": identifiers,
"symbols": symbols,
"mbed_names": mbed_names
}
def parse_macros(self, header_files): def parse_macros(self, header_files):
""" """
Parse all macros defined by #define preprocessor directives. Parse all macros defined by #define preprocessor directives.
Args: Args:
header_files: A list of filepaths to look through. * header_files: A List of filepaths to look through.
Returns: Returns a List of Match objects for the found macros.
A list of Match objects for the macros.
""" """
MACRO_REGEX = r"#define (?P<macro>\w+)" MACRO_REGEX = r"#define (?P<macro>\w+)"
NON_MACROS = ( NON_MACROS = (
@ -147,36 +256,36 @@ class NameCheck(object):
for header_file in header_files: for header_file in header_files:
with open(header_file, "r") as header: with open(header_file, "r") as header:
for line in header: for line in header:
macro = re.search(MACRO_REGEX, line) for macro in re.finditer(MACRO_REGEX, line):
if (macro and if not macro.group("macro").startswith(NON_MACROS):
not macro.group("macro").startswith(NON_MACROS)): macros.append(Match(
macros.append(Match( header_file,
header_file, line,
line, (macro.start(), macro.end()),
(macro.start(), macro.end()), macro.group("macro")))
macro.group("macro")))
return macros return macros
def parse_MBED_names(self, files): def parse_MBED_names(self, files):
""" """
Parse all words in the file that begin with MBED. Includes macros. Parse all words in the file that begin with MBED. Includes macros.
There have been typos of TLS, hence the broader check than MBEDTLS.
Args: Args:
files: A list of filepaths to look through. * files: a List of filepaths to look through.
Returns: Returns a List of Match objects for words beginning with MBED.
A list of Match objects for words beginning with MBED.
""" """
MBED_names = [] MBED_names = []
for filename in files: for filename in files:
with open(filename, "r") as fp: with open(filename, "r") as fp:
for line in fp: for line in fp:
# Ignore any names that are deliberately opted-out # Ignore any names that are deliberately opted-out or in
if re.search(r"// *no-check-names", line): # legacy error directives
if re.search(r"// *no-check-names|#error", line):
continue continue
for name in re.finditer(r"\bMBED.+?_[A-Z0-9_]*", line): for name in re.finditer(r"\bMBED.+?_[A-Z0-9_]*", line):
MBED_names.append(Match( MBED_names.append(Match(
filename, filename,
@ -192,16 +301,18 @@ class NameCheck(object):
Parse all enum value constants that are declared. Parse all enum value constants that are declared.
Args: Args:
header_files: A list of filepaths to look through. * header_files: A List of filepaths to look through.
Returns: Returns a List of Match objects for the findings.
A list of (enum constants, containing filename).
""" """
enum_consts = [] enum_consts = []
for header_file in header_files: for header_file in header_files:
# Emulate a finite state machine to parse enum declarations. # Emulate a finite state machine to parse enum declarations.
# 0 = not in enum
# 1 = inside enum
# 2 = almost inside enum
state = 0 state = 0
with open(header_file, "r") as header: with open(header_file, "r") as header:
for line in header: for line in header:
@ -221,23 +332,28 @@ class NameCheck(object):
line, line,
(enum_const.start(), enum_const.end()), (enum_const.start(), enum_const.end()),
enum_const.group("enum_const"))) enum_const.group("enum_const")))
return enum_consts return enum_consts
def parse_identifiers(self, header_files): def parse_identifiers(self, header_files):
""" """
Parse all lines of a header where a function identifier is declared, Parse all lines of a header where a function identifier is declared,
based on some huersitics. Assumes every line that is not a comment or a based on some huersitics. Highly dependent on formatting style.
preprocessor directive contains some identifier.
Args: Args:
header_files: A list of filepaths to look through. * header_files: A List of filepaths to look through.
Returns: Returns a List of Match objects with identifiers.
A list of (identifier, containing filename)
""" """
EXCLUDED_DECLARATIONS = ( EXCLUDED_LINES = (
r"^(extern \"C\"|(typedef )?(struct|union|enum)( {)?$|};?$|$)" r"^("
r"extern \"C\"|"
r"(typedef )?(struct|union|enum)( {)?$|"
r"};?$|"
r"$|"
r"//|"
r"#"
r")"
) )
identifiers = [] identifiers = []
@ -245,39 +361,69 @@ class NameCheck(object):
for header_file in header_files: for header_file in header_files:
with open(header_file, "r") as header: with open(header_file, "r") as header:
in_block_comment = False in_block_comment = False
previous_line = None
for line in header: for line in header:
# Skip parsing this line if it begins or ends a block # Skip parsing this line if a block comment ends on it,
# comment, and set the state machine's state. # but don't skip if it has just started -- there is a chance
# it ends on the same line.
if re.search(r"/\*", line): if re.search(r"/\*", line):
in_block_comment = True in_block_comment = not in_block_comment
continue if re.search(r"\*/", line):
elif re.search(r"\*/", line) and in_block_comment: in_block_comment = not in_block_comment
in_block_comment = False
continue
# Skip parsing this line if it's a line comment, or if it
# begins with a preprocessor directive
if in_block_comment or re.match(r"^(//|#)", line):
continue continue
if re.match(EXCLUDED_DECLARATIONS, line): if in_block_comment:
previous_line = None
continue
if re.match(EXCLUDED_LINES, line):
previous_line = None
continue
# Match "^something something$", with optional inline/static
# This *might* be a function with its argument brackets on
# the next line, or a struct declaration, so keep note of it
if re.match(
r"(inline |static |typedef )*\w+ \w+$",
line):
previous_line = line
continue
# If previous line seemed to start an unfinished declaration
# (as above), and this line begins with a bracket, concat
# them and treat them as one line.
if previous_line and re.match(" *[\({]", line):
line = previous_line.strip() + line.strip()
previous_line = None
# Skip parsing if line has a space in front = hueristic to
# skip function argument lines (highly subject to formatting
# changes)
if line[0] == " ":
continue continue
identifier = re.search( identifier = re.search(
# Matches: "mbedtls_aes_init(" # Match something(
r"([a-zA-Z_][a-zA-Z0-9_]*)\(", r".* \**(\w+)\(|"
# Match (*something)(
r".*\( *\* *(\w+) *\) *\(|"
# Match names of named data structures
r"(?:typedef +)?(?:struct|union|enum) +(\w+)(?: *{)?$|"
# Match names of typedef instances, after closing bracket
r"}? *(\w+)[;[].*",
line line
) )
if identifier: if identifier:
# Find the group that matched, and append it
for group in identifier.groups(): for group in identifier.groups():
if group: if group:
identifiers.append(Match( identifiers.append(Match(
header_file, header_file,
line, line,
(identifier.start(), identifier.end()), (identifier.start(), identifier.end()),
identifier.group(1))) group))
return identifiers return identifiers
@ -285,19 +431,23 @@ class NameCheck(object):
""" """
Compile the Mbed TLS libraries, and parse the TLS, Crypto, and x509 Compile the Mbed TLS libraries, and parse the TLS, Crypto, and x509
object files using nm to retrieve the list of referenced symbols. object files using nm to retrieve the list of referenced symbols.
Exceptions thrown here are rethrown because they would be critical
Returns: errors that void several tests, and thus needs to halt the program. This
A list of unique symbols defined and used in the libraries. is explicitly done for clarity.
"""
Returns a List of unique symbols defined and used in the libraries.
"""
self.log.info("Compiling...")
symbols = [] symbols = []
# Back up the config and atomically compile with the full configratuion. # Back up the config and atomically compile with the full configratuion.
shutil.copy("include/mbedtls/mbedtls_config.h", shutil.copy("include/mbedtls/mbedtls_config.h",
"include/mbedtls/mbedtls_config.h.bak") "include/mbedtls/mbedtls_config.h.bak")
try: try:
# Use check=True in all subprocess calls so that failures are raised
# as exceptions and logged.
subprocess.run( subprocess.run(
["perl", "scripts/config.py", "full"], ["python3", "scripts/config.py", "full"],
encoding=sys.stdout.encoding, encoding=sys.stdout.encoding,
check=True check=True
) )
@ -326,8 +476,8 @@ class NameCheck(object):
check=True check=True
) )
except subprocess.CalledProcessError as error: except subprocess.CalledProcessError as error:
self.log.error(error)
self.set_return_code(2) self.set_return_code(2)
raise error
finally: finally:
shutil.move("include/mbedtls/mbedtls_config.h.bak", shutil.move("include/mbedtls/mbedtls_config.h.bak",
"include/mbedtls/mbedtls_config.h") "include/mbedtls/mbedtls_config.h")
@ -339,8 +489,11 @@ class NameCheck(object):
Run nm to retrieve the list of referenced symbols in each object file. Run nm to retrieve the list of referenced symbols in each object file.
Does not return the position data since it is of no use. Does not return the position data since it is of no use.
Returns: Args:
A list of unique symbols defined and used in any of the object files. * object_files: a List of compiled object files to search through.
Returns a List of unique symbols defined and used in any of the object
files.
""" """
UNDEFINED_SYMBOL = r"^\S+: +U |^$|^\S+:$" UNDEFINED_SYMBOL = r"^\S+: +U |^$|^\S+:$"
VALID_SYMBOL = r"^\S+( [0-9A-Fa-f]+)* . _*(?P<symbol>\w+)" VALID_SYMBOL = r"^\S+( [0-9A-Fa-f]+)* . _*(?P<symbol>\w+)"
@ -348,6 +501,7 @@ class NameCheck(object):
symbols = [] symbols = []
# Gather all outputs of nm
nm_output = "" nm_output = ""
for lib in object_files: for lib in object_files:
nm_output += subprocess.run( nm_output += subprocess.run(
@ -357,6 +511,7 @@ class NameCheck(object):
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
check=True check=True
).stdout ).stdout
for line in nm_output.splitlines(): for line in nm_output.splitlines():
if not re.match(UNDEFINED_SYMBOL, line): if not re.match(UNDEFINED_SYMBOL, line):
symbol = re.match(VALID_SYMBOL, line) symbol = re.match(VALID_SYMBOL, line)
@ -364,86 +519,49 @@ class NameCheck(object):
symbols.append(symbol.group("symbol")) symbols.append(symbol.group("symbol"))
else: else:
self.log.error(line) self.log.error(line)
return symbols return symbols
def parse_names_in_source(self): def perform_checks(self, show_problems: True):
"""
Calls each parsing function to retrieve various elements of the code,
together with their source location. Puts the parsed values in the
internal variable self.parse_result.
"""
self.log.info("Parsing source code...")
m_headers = self.get_files("h", os.path.join("include", "mbedtls"))
p_headers = self.get_files("h", os.path.join("include", "psa"))
t_headers = ["3rdparty/everest/include/everest/everest.h",
"3rdparty/everest/include/everest/x25519.h"]
d_headers = self.get_files("h", os.path.join("tests", "include", "test", "drivers"))
l_headers = self.get_files("h", "library")
libraries = self.get_files("c", "library") + [
"3rdparty/everest/library/everest.c",
"3rdparty/everest/library/x25519.c"]
all_macros = self.parse_macros(
m_headers + p_headers + t_headers + l_headers + d_headers)
enum_consts = self.parse_enum_consts(m_headers + l_headers + t_headers)
identifiers = self.parse_identifiers(m_headers + p_headers + t_headers + l_headers)
symbols = self.parse_symbols()
mbed_names = self.parse_MBED_names(
m_headers + p_headers + t_headers + l_headers + libraries)
# Remove identifier macros like mbedtls_printf or mbedtls_calloc
macros = list(set(all_macros) - set(identifiers))
self.log.info("Found:")
self.log.info(" {} Macros".format(len(all_macros)))
self.log.info(" {} Enum Constants".format(len(enum_consts)))
self.log.info(" {} Identifiers".format(len(identifiers)))
self.log.info(" {} Exported Symbols".format(len(symbols)))
self.log.info("Analysing...")
self.parse_result = {
"macros": macros,
"enum_consts": enum_consts,
"identifiers": identifiers,
"symbols": symbols,
"mbed_names": mbed_names
}
def perform_checks(self):
""" """
Perform each check in order, output its PASS/FAIL status. Maintain an Perform each check in order, output its PASS/FAIL status. Maintain an
overall test status, and output that at the end. overall test status, and output that at the end.
Args:
* show_problems: whether to show the problematic examples.
""" """
self.log.info("=============")
problems = 0 problems = 0
problems += self.check_symbols_declared_in_header() problems += self.check_symbols_declared_in_header(show_problems)
pattern_checks = [ pattern_checks = [
("macros", MACRO_PATTERN), ("macros", MACRO_PATTERN),
("enum_consts", MACRO_PATTERN), ("enum_consts", CONSTANTS_PATTERN),
("identifiers", IDENTIFIER_PATTERN)] ("identifiers", IDENTIFIER_PATTERN)]
for group, check_pattern in pattern_checks: for group, check_pattern in pattern_checks:
problems += self.check_match_pattern(group, check_pattern) problems += self.check_match_pattern(
show_problems, group, check_pattern)
problems += self.check_for_typos() problems += self.check_for_typos(show_problems)
self.log.info("=============") self.log.info("=============")
if problems > 0: if problems > 0:
self.log.info("FAIL: {0} problem(s) to fix".format(str(problems))) self.log.info("FAIL: {0} problem(s) to fix".format(str(problems)))
if not show_problems:
self.log.info("Remove --quiet to show the problems.")
else: else:
self.log.info("PASS") self.log.info("PASS")
def check_symbols_declared_in_header(self): def check_symbols_declared_in_header(self, show_problems):
""" """
Perform a check that all detected symbols in the library object files Perform a check that all detected symbols in the library object files
are properly declared in headers. are properly declared in headers.
Outputs to the logger the PASS/FAIL status, followed by the location of
problems.
Returns the number of problems that needs fixing. Args:
* show_problems: whether to show the problematic examples.
Returns the number of problems that need fixing.
""" """
problems = [] problems = []
for symbol in self.parse_result["symbols"]: for symbol in self.parse_result["symbols"]:
@ -452,39 +570,48 @@ class NameCheck(object):
if symbol == identifier_match.name: if symbol == identifier_match.name:
found_symbol_declared = True found_symbol_declared = True
break break
if not found_symbol_declared: if not found_symbol_declared:
problems.append(SymbolNotInHeader(symbol)) problems.append(SymbolNotInHeader(symbol))
if problems: self.output_check_result("All symbols in header", problems, show_problems)
self.set_return_code(1)
self.log.info("All symbols in header: FAIL")
for problem in problems:
self.log.info(str(problem) + "\n")
else:
self.log.info("All symbols in header: PASS")
return len(problems) return len(problems)
def check_match_pattern(self, group_to_check, check_pattern):
def check_match_pattern(self, show_problems, group_to_check, check_pattern):
"""
Perform a check that all items of a group conform to a regex pattern.
Args:
* show_problems: whether to show the problematic examples.
* group_to_check: string key to index into self.parse_result.
* check_pattern: the regex to check against.
Returns the number of problems that need fixing.
"""
problems = [] problems = []
for item_match in self.parse_result[group_to_check]: for item_match in self.parse_result[group_to_check]:
if not re.match(check_pattern, item_match.name): if not re.match(check_pattern, item_match.name):
problems.append(PatternMismatch(check_pattern, item_match)) problems.append(PatternMismatch(check_pattern, item_match))
if re.match(r".*__.*", item_match.name): if re.match(r".*__.*", item_match.name):
problems.append(PatternMismatch("double underscore", item_match)) problems.append(PatternMismatch("double underscore", item_match))
if problems: self.output_check_result(
self.set_return_code(1) "Naming patterns of {}".format(group_to_check),
self.log.info("Naming patterns of {}: FAIL".format(group_to_check)) problems,
for problem in problems: show_problems)
self.log.info(str(problem) + "\n")
else:
self.log.info("Naming patterns of {}: PASS".format(group_to_check))
return len(problems) return len(problems)
def check_for_typos(self): def check_for_typos(self, show_problems):
"""
Perform a check that all words in the soure code beginning with MBED are
either defined as macros, or as enum constants.
Args:
* show_problems: whether to show the problematic examples.
Returns the number of problems that need fixing.
"""
problems = [] problems = []
all_caps_names = list(set([ all_caps_names = list(set([
match.name for match match.name for match
@ -494,23 +621,45 @@ class NameCheck(object):
TYPO_EXCLUSION = r"XXX|__|_$|^MBEDTLS_.*CONFIG_FILE$" TYPO_EXCLUSION = r"XXX|__|_$|^MBEDTLS_.*CONFIG_FILE$"
for name_match in self.parse_result["mbed_names"]: for name_match in self.parse_result["mbed_names"]:
if name_match.name not in all_caps_names: found = name_match.name in all_caps_names
if not re.search(TYPO_EXCLUSION, name_match.name):
# Since MBEDTLS_PSA_ACCEL_XXX defines are defined by the
# PSA driver, they will not exist as macros. However, they
# should still be checked for typos using the equivalent
# BUILTINs that exist.
if "MBEDTLS_PSA_ACCEL_" in name_match.name:
found = name_match.name.replace(
"MBEDTLS_PSA_ACCEL_",
"MBEDTLS_PSA_BUILTIN_") in all_caps_names
if not found and not re.search(TYPO_EXCLUSION, name_match.name):
problems.append(Typo(name_match)) problems.append(Typo(name_match))
self.output_check_result("Likely typos", problems, show_problems)
return len(problems)
def output_check_result(self, name, problems, show_problems):
"""
Write out the PASS/FAIL status of a performed check depending on whether
there were problems.
Args:
* show_problems: whether to show the problematic examples.
"""
if problems: if problems:
self.set_return_code(1) self.set_return_code(1)
self.log.info("Likely typos: FAIL") self.log.info("{}: FAIL".format(name))
for problem in problems: if show_problems:
self.log.info(str(problem) + "\n") self.log.info("")
for problem in problems:
self.log.warn(str(problem) + "\n")
else: else:
self.log.info("Likely typos: PASS") self.log.info("{}: PASS".format(name))
return len(problems)
def main(): def main():
""" """
Main function, parses command-line arguments. Perform argument parsing, and create an instance of NameCheck to begin the
core operation.
""" """
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
@ -523,20 +672,28 @@ def main():
parser.add_argument("-v", "--verbose", parser.add_argument("-v", "--verbose",
action="store_true", action="store_true",
help="enable script debug outputs") help="show parse results")
parser.add_argument("-q", "--quiet",
action="store_true",
help="hide unnecessary text and problematic examples")
args = parser.parse_args() args = parser.parse_args()
try: try:
name_check = NameCheck() name_check = NameCheck()
name_check.setup_logger(verbose=args.verbose) name_check.setup_logger(verbose=args.verbose)
name_check.parse_names_in_source() name_check.parse_names_in_source()
name_check.perform_checks() name_check.perform_checks(show_problems=not args.quiet)
sys.exit(name_check.return_code)
except subprocess.CalledProcessError as error:
traceback.print_exc()
print("!! Compilation faced a critical error, "
"check-names can't continue further.")
sys.exit(name_check.return_code) sys.exit(name_check.return_code)
except Exception: except Exception:
traceback.print_exc() traceback.print_exc()
sys.exit(2) sys.exit(2)
if __name__ == "__main__": if __name__ == "__main__":
main() main()