tests: replace pycodestyle with black

Drop pycodestyle for code style checking in favor of black. Black is a
much faster, stable, PEP8-compliant code style checker that also
offers automatic formatting. It aims to be highly stable and to
produce the smallest possible diffs, and it is used by many projects,
both small and large.

Running checkstyle with black takes only a few seconds and produces
terse output, so test-checkstyle-diff is no longer necessary.

Expand the scope of checkstyle to all Python files in the repo,
replacing test-checkstyle with checkstyle-python.

Also, fixstyle-python is now available for automatic style formatting.
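A minimal usage sketch of the new targets (assuming they are invoked
via make, as the old test-checkstyle target was):

    make checkstyle-python   # run black in check mode over all Python files
    make fixstyle-python     # let black reformat files in place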

Note: the Python virtualenv setup has been consolidated in
test/Makefile and test/requirements*.txt, which will eventually be
moved to a central location. This is required to simplify the
automated generation of docker executor images in the CI.

Type: improvement
Change-Id: I022a326603485f58585e879ac0f697fceefbc9c8
Signed-off-by: Klement Sekera <klement.sekera@gmail.com>
Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
diff --git a/src/tools/vppapigen/vppapigen.py b/src/tools/vppapigen/vppapigen.py
index 8415c28..c7bc681 100755
--- a/src/tools/vppapigen/vppapigen.py
+++ b/src/tools/vppapigen/vppapigen.py
@@ -10,9 +10,10 @@
 import ply.lex as lex
 import ply.yacc as yacc
 
-assert sys.version_info >= (3, 5), \
-    "Not supported Python version: {}".format(sys.version)
-log = logging.getLogger('vppapigen')
+assert sys.version_info >= (3, 5), "Not supported Python version: {}".format(
+    sys.version
+)
+log = logging.getLogger("vppapigen")
 
 # Ensure we don't leave temporary files around
 sys.dont_write_bytecode = True
@@ -28,11 +29,10 @@
 
 
 def global_type_add(name, obj):
-    '''Add new type to the dictionary of types '''
-    type_name = 'vl_api_' + name + '_t'
+    """Add new type to the dictionary of types"""
+    type_name = "vl_api_" + name + "_t"
     if type_name in global_types:
-        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(
-            name, obj))
+        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(name, obj))
     global_types[type_name] = obj
 
 
@@ -49,104 +49,104 @@
         self.filename = filename
 
     reserved = {
-        'service': 'SERVICE',
-        'rpc': 'RPC',
-        'returns': 'RETURNS',
-        'null': 'NULL',
-        'stream': 'STREAM',
-        'events': 'EVENTS',
-        'define': 'DEFINE',
-        'typedef': 'TYPEDEF',
-        'enum': 'ENUM',
-        'enumflag': 'ENUMFLAG',
-        'typeonly': 'TYPEONLY',
-        'manual_print': 'MANUAL_PRINT',
-        'manual_endian': 'MANUAL_ENDIAN',
-        'dont_trace': 'DONT_TRACE',
-        'autoreply': 'AUTOREPLY',
-        'autoendian': 'AUTOENDIAN',
-        'option': 'OPTION',
-        'u8': 'U8',
-        'u16': 'U16',
-        'u32': 'U32',
-        'u64': 'U64',
-        'i8': 'I8',
-        'i16': 'I16',
-        'i32': 'I32',
-        'i64': 'I64',
-        'f64': 'F64',
-        'bool': 'BOOL',
-        'string': 'STRING',
-        'import': 'IMPORT',
-        'true': 'TRUE',
-        'false': 'FALSE',
-        'union': 'UNION',
-        'counters': 'COUNTERS',
-        'paths': 'PATHS',
-        'units': 'UNITS',
-        'severity': 'SEVERITY',
-        'type': 'TYPE',
-        'description': 'DESCRIPTION',
+        "service": "SERVICE",
+        "rpc": "RPC",
+        "returns": "RETURNS",
+        "null": "NULL",
+        "stream": "STREAM",
+        "events": "EVENTS",
+        "define": "DEFINE",
+        "typedef": "TYPEDEF",
+        "enum": "ENUM",
+        "enumflag": "ENUMFLAG",
+        "typeonly": "TYPEONLY",
+        "manual_print": "MANUAL_PRINT",
+        "manual_endian": "MANUAL_ENDIAN",
+        "dont_trace": "DONT_TRACE",
+        "autoreply": "AUTOREPLY",
+        "autoendian": "AUTOENDIAN",
+        "option": "OPTION",
+        "u8": "U8",
+        "u16": "U16",
+        "u32": "U32",
+        "u64": "U64",
+        "i8": "I8",
+        "i16": "I16",
+        "i32": "I32",
+        "i64": "I64",
+        "f64": "F64",
+        "bool": "BOOL",
+        "string": "STRING",
+        "import": "IMPORT",
+        "true": "TRUE",
+        "false": "FALSE",
+        "union": "UNION",
+        "counters": "COUNTERS",
+        "paths": "PATHS",
+        "units": "UNITS",
+        "severity": "SEVERITY",
+        "type": "TYPE",
+        "description": "DESCRIPTION",
     }
 
-    tokens = ['STRING_LITERAL',
-              'ID', 'NUM'] + list(reserved.values())
+    tokens = ["STRING_LITERAL", "ID", "NUM"] + list(reserved.values())
 
-    t_ignore_LINE_COMMENT = '//.*'
+    t_ignore_LINE_COMMENT = "//.*"
 
     def t_FALSE(self, t):
-        r'false'
+        r"false"
         t.value = False
         return t
 
     def t_TRUE(self, t):
-        r'false'
+        r"false"
         t.value = True
         return t
 
     def t_NUM(self, t):
-        r'0[xX][0-9a-fA-F]+|-?\d+\.?\d*'
-        base = 16 if t.value.startswith('0x') else 10
-        if '.' in t.value:
+        r"0[xX][0-9a-fA-F]+|-?\d+\.?\d*"
+        base = 16 if t.value.startswith("0x") else 10
+        if "." in t.value:
             t.value = float(t.value)
         else:
             t.value = int(t.value, base)
         return t
 
     def t_ID(self, t):
-        r'[a-zA-Z_][a-zA-Z_0-9]*'
+        r"[a-zA-Z_][a-zA-Z_0-9]*"
         # Check for reserved words
-        t.type = VPPAPILexer.reserved.get(t.value, 'ID')
+        t.type = VPPAPILexer.reserved.get(t.value, "ID")
         return t
 
     # C string
     def t_STRING_LITERAL(self, t):
-        r'\"([^\\\n]|(\\.))*?\"'
-        t.value = str(t.value).replace("\"", "")
+        r"\"([^\\\n]|(\\.))*?\" "
+        t.value = str(t.value).replace('"', "")
         return t
 
     # C or C++ comment (ignore)
     def t_comment(self, t):
-        r'(/\*(.|\n)*?\*/)|(//.*)'
-        t.lexer.lineno += t.value.count('\n')
+        r"(/\*(.|\n)*?\*/)|(//.*)"
+        t.lexer.lineno += t.value.count("\n")
 
     # Error handling rule
     def t_error(self, t):
-        raise ParseError("Illegal character '{}' ({})"
-                         "in {}: line {}".format(t.value[0],
-                                                 hex(ord(t.value[0])),
-                                                 self.filename,
-                                                 t.lexer.lineno))
+        raise ParseError(
+            "Illegal character '{}' ({})"
+            "in {}: line {}".format(
+                t.value[0], hex(ord(t.value[0])), self.filename, t.lexer.lineno
+            )
+        )
 
     # Define a rule so we can track line numbers
     def t_newline(self, t):
-        r'\n+'
+        r"\n+"
         t.lexer.lineno += len(t.value)
 
     literals = ":{}[];=.,"
 
     # A string containing ignored characters (spaces and tabs)
-    t_ignore = ' \t'
+    t_ignore = " \t"
 
 
 def vla_mark_length_field(block):
@@ -164,23 +164,25 @@
             vla = True
             if i + 1 < len(block):
                 raise ValueError(
-                    'VLA field "{}" must be the last field in message "{}"'
-                    .format(b.fieldname, name))
-        elif b.fieldtype.startswith('vl_api_'):
+                    'VLA field "{}" must be the last field in message "{}"'.format(
+                        b.fieldname, name
+                    )
+                )
+        elif b.fieldtype.startswith("vl_api_"):
             if global_types[b.fieldtype].vla:
                 vla = True
                 if i + 1 < len(block):
                     raise ValueError(
                         'VLA field "{}" must be the last '
-                        'field in message "{}"'
-                        .format(b.fieldname, name))
-        elif b.fieldtype == 'string' and b.length == 0:
+                        'field in message "{}"'.format(b.fieldname, name)
+                    )
+        elif b.fieldtype == "string" and b.length == 0:
             vla = True
             if i + 1 < len(block):
                 raise ValueError(
                     'VLA field "{}" must be the last '
-                    'field in message "{}"'
-                    .format(b.fieldname, name))
+                    'field in message "{}"'.format(b.fieldname, name)
+                )
     return vla
 
 
@@ -192,10 +194,9 @@
 
 
 class Service(Processable):
-    type = 'Service'
+    type = "Service"
 
-    def __init__(self, caller, reply, events=None, stream_message=None,
-                 stream=False):
+    def __init__(self, caller, reply, events=None, stream_message=None, stream=False):
         self.caller = caller
         self.reply = reply
         self.stream = stream
@@ -204,7 +205,7 @@
 
 
 class Typedef(Processable):
-    type = 'Typedef'
+    type = "Typedef"
 
     def __init__(self, name, flags, block):
         self.name = name
@@ -214,9 +215,9 @@
         self.manual_print = False
         self.manual_endian = False
         for f in flags:
-            if f == 'manual_print':
+            if f == "manual_print":
                 self.manual_print = True
-            elif f == 'manual_endian':
+            elif f == "manual_endian":
                 self.manual_endian = True
         global_type_add(name, self)
 
@@ -224,14 +225,14 @@
         vla_mark_length_field(self.block)
 
     def process(self, result):
-        result['types'].append(self)
+        result["types"].append(self)
 
     def __repr__(self):
         return self.name + str(self.flags) + str(self.block)
 
 
 class Using(Processable):
-    type = 'Using'
+    type = "Using"
 
     def __init__(self, name, flags, alias):
         self.name = name
@@ -243,16 +244,15 @@
         self.manual_print = False
         self.manual_endian = False
         for f in flags:
-            if f == 'manual_print':
+            if f == "manual_print":
                 self.manual_print = True
-            elif f == 'manual_endian':
+            elif f == "manual_endian":
                 self.manual_endian = True
 
         if isinstance(alias, Array):
-            a = {'type': alias.fieldtype,
-                 'length': alias.length}
+            a = {"type": alias.fieldtype, "length": alias.length}
         else:
-            a = {'type': alias.fieldtype}
+            a = {"type": alias.fieldtype}
         self.alias = a
         self.using = alias
 
@@ -265,14 +265,14 @@
         global_type_add(name, self)
 
     def process(self, result):  # -> Dict
-        result['types'].append(self)
+        result["types"].append(self)
 
     def __repr__(self):
         return self.name + str(self.alias)
 
 
 class Union(Processable):
-    type = 'Union'
+    type = "Union"
 
     def __init__(self, name, flags, block):
         self.manual_print = False
@@ -280,9 +280,9 @@
         self.name = name
 
         for f in flags:
-            if f == 'manual_print':
+            if f == "manual_print":
                 self.manual_print = True
-            elif f == 'manual_endian':
+            elif f == "manual_endian":
                 self.manual_endian = True
 
         self.block = block
@@ -292,14 +292,14 @@
         global_type_add(name, self)
 
     def process(self, result):
-        result['types'].append(self)
+        result["types"].append(self)
 
     def __repr__(self):
         return str(self.block)
 
 
 class Define(Processable):
-    type = 'Define'
+    type = "Define"
 
     def __init__(self, name, flags, block):
         self.name = name
@@ -312,15 +312,15 @@
         self.autoendian = 0
         self.options = {}
         for f in flags:
-            if f == 'dont_trace':
+            if f == "dont_trace":
                 self.dont_trace = True
-            elif f == 'manual_print':
+            elif f == "manual_print":
                 self.manual_print = True
-            elif f == 'manual_endian':
+            elif f == "manual_endian":
                 self.manual_endian = True
-            elif f == 'autoreply':
+            elif f == "autoreply":
                 self.autoreply = True
-            elif f == 'autoendian':
+            elif f == "autoendian":
                 self.autoendian = 1
 
         remove = []
@@ -337,12 +337,11 @@
         self.crc = str(block).encode()
 
     def autoreply_block(self, name, parent):
-        block = [Field('u32', 'context'),
-                 Field('i32', 'retval')]
+        block = [Field("u32", "context"), Field("i32", "retval")]
         # inherit the parent's options
         for k, v in parent.options.items():
             block.append(Option(k, v))
-        return Define(name + '_reply', [], block)
+        return Define(name + "_reply", [], block)
 
     def process(self, result):  # -> Dict
         tname = self.__class__.__name__
@@ -355,9 +354,9 @@
 
 
 class Enum(Processable):
-    type = 'Enum'
+    type = "Enum"
 
-    def __init__(self, name, block, enumtype='u32'):
+    def __init__(self, name, block, enumtype="u32"):
         self.name = name
         self.enumtype = enumtype
         self.vla = False
@@ -369,47 +368,50 @@
         bc_set = False
 
         for b in block:
-            if 'value' in b:
-                count = b['value']
+            if "value" in b:
+                count = b["value"]
             else:
                 count += 1
-            block2.append([b['id'], count])
+            block2.append([b["id"], count])
             try:
-                if b['option']['backwards_compatible']:
+                if b["option"]["backwards_compatible"]:
                     pass
                 bc_set = True
             except KeyError:
-                block3.append([b['id'], count])
+                block3.append([b["id"], count])
                 if bc_set:
-                    raise ValueError("Backward compatible enum must "
-                                     "be last {!r} {!r}"
-                                     .format(name, b['id']))
+                    raise ValueError(
+                        "Backward compatible enum must "
+                        "be last {!r} {!r}".format(name, b["id"])
+                    )
         self.block = block2
         self.crc = str(block3).encode()
         global_type_add(name, self)
 
     def process(self, result):
-        result['types'].append(self)
+        result["types"].append(self)
 
     def __repr__(self):
         return self.name + str(self.block)
 
 
 class EnumFlag(Enum):
-    type = 'EnumFlag'
+    type = "EnumFlag"
 
-    def __init__(self, name, block, enumtype='u32'):
+    def __init__(self, name, block, enumtype="u32"):
         super(EnumFlag, self).__init__(name, block, enumtype)
 
         for b in self.block:
             if bin(b[1])[2:].count("1") > 1:
-                raise TypeError("%s is not a flag enum.  No element in a "
-                                "flag enum may have more than a "
-                                "single bit set." % self.name)
+                raise TypeError(
+                    "%s is not a flag enum.  No element in a "
+                    "flag enum may have more than a "
+                    "single bit set." % self.name
+                )
 
 
 class Import(Processable):
-    type = 'Import'
+    type = "Import"
     _initialized = False
 
     def __new__(cls, *args, **kwargs):
@@ -440,7 +442,7 @@
 
 
 class Option(Processable):
-    type = 'Option'
+    type = "Option"
 
     def __init__(self, option, value=None):
         self.option = option
@@ -458,7 +460,7 @@
 
 
 class Array(Processable):
-    type = 'Array'
+    type = "Array"
 
     def __init__(self, fieldtype, name, length, modern_vla=False):
         self.fieldtype = fieldtype
@@ -474,12 +476,11 @@
             self.vla = False
 
     def __repr__(self):
-        return str([self.fieldtype, self.fieldname, self.length,
-                    self.lengthfield])
+        return str([self.fieldtype, self.fieldname, self.length, self.lengthfield])
 
 
 class Field(Processable):
-    type = 'Field'
+    type = "Field"
 
     def __init__(self, fieldtype, name, limit=None):
         # limit field has been expanded to an options dict.
@@ -487,13 +488,14 @@
         self.fieldtype = fieldtype
         self.is_lengthfield = False
 
-        if self.fieldtype == 'string':
-            raise ValueError("The string type {!r} is an "
-                             "array type ".format(name))
+        if self.fieldtype == "string":
+            raise ValueError("The string type {!r} is an " "array type ".format(name))
 
         if name in keyword.kwlist:
-            raise ValueError("Fieldname {!r} is a python keyword and is not "
-                             "accessible via the python API. ".format(name))
+            raise ValueError(
+                "Fieldname {!r} is a python keyword and is not "
+                "accessible via the python API. ".format(name)
+            )
         self.fieldname = name
         self.limit = limit
 
@@ -502,35 +504,34 @@
 
 
 class Counter(Processable):
-    type = 'Counter'
+    type = "Counter"
 
     def __init__(self, path, counter):
         self.name = path
         self.block = counter
 
     def process(self, result):  # -> Dict
-        result['Counters'].append(self)
+        result["Counters"].append(self)
 
 
 class Paths(Processable):
-    type = 'Paths'
+    type = "Paths"
 
     def __init__(self, pathset):
         self.paths = pathset
 
     def __repr__(self):
-        return "%s(paths=%s)" % (
-            self.__class__.__name__, self.paths
-        )
+        return "%s(paths=%s)" % (self.__class__.__name__, self.paths)
 
 
 class Coord:
-    """ Coordinates of a syntactic element. Consists of:
-            - File name
-            - Line number
-            - (optional) column number, for the Lexer
+    """Coordinates of a syntactic element. Consists of:
+    - File name
+    - Line number
+    - (optional) column number, for the Lexer
     """
-    __slots__ = ('file', 'line', 'column', '__weakref__')
+
+    __slots__ = ("file", "line", "column", "__weakref__")
 
     def __init__(self, file, line, column=None):
         self.file = file
@@ -568,49 +569,47 @@
             self.logger.warning("%s: %s" % (coord, msg))
 
     def _coord(self, lineno, column=None):
-        return Coord(
-            file=self.filename,
-            line=lineno, column=column)
+        return Coord(file=self.filename, line=lineno, column=column)
 
     def _token_coord(self, p, token_idx):
-        """ Returns the coordinates for the YaccProduction object 'p' indexed
-            with 'token_idx'. The coordinate includes the 'lineno' and
-            'column'. Both follow the lex semantic, starting from 1.
+        """Returns the coordinates for the YaccProduction object 'p' indexed
+        with 'token_idx'. The coordinate includes the 'lineno' and
+        'column'. Both follow the lex semantic, starting from 1.
         """
-        last_cr = p.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+        last_cr = p.lexer.lexdata.rfind("\n", 0, p.lexpos(token_idx))
         if last_cr < 0:
             last_cr = -1
-        column = (p.lexpos(token_idx) - (last_cr))
+        column = p.lexpos(token_idx) - (last_cr)
         return self._coord(p.lineno(token_idx), column)
 
     def p_slist(self, p):
-        '''slist : stmt
-                 | slist stmt'''
+        """slist : stmt
+        | slist stmt"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_stmt(self, p):
-        '''stmt : define
-                | typedef
-                | option
-                | import
-                | enum
-                | enumflag
-                | union
-                | service
-                | paths
-                | counters'''
+        """stmt : define
+        | typedef
+        | option
+        | import
+        | enum
+        | enumflag
+        | union
+        | service
+        | paths
+        | counters"""
         p[0] = p[1]
 
     def p_import(self, p):
-        '''import : IMPORT STRING_LITERAL ';' '''
+        """import : IMPORT STRING_LITERAL ';'"""
         p[0] = Import(p[2], revision=self.revision)
 
     def p_path_elements(self, p):
-        '''path_elements : path_element
-                            | path_elements path_element'''
+        """path_elements : path_element
+        | path_elements path_element"""
         if len(p) == 2:
             p[0] = p[1]
         else:
@@ -620,20 +619,20 @@
                 p[0] = p[1] + [p[2]]
 
     def p_path_element(self, p):
-        '''path_element : STRING_LITERAL STRING_LITERAL ';' '''
-        p[0] = {'path': p[1], 'counter': p[2]}
+        """path_element : STRING_LITERAL STRING_LITERAL ';'"""
+        p[0] = {"path": p[1], "counter": p[2]}
 
     def p_paths(self, p):
-        '''paths : PATHS '{' path_elements '}' ';' '''
+        """paths : PATHS '{' path_elements '}' ';'"""
         p[0] = Paths(p[3])
 
     def p_counters(self, p):
-        '''counters : COUNTERS ID '{' counter_elements '}' ';' '''
+        """counters : COUNTERS ID '{' counter_elements '}' ';'"""
         p[0] = Counter(p[2], p[4])
 
     def p_counter_elements(self, p):
-        '''counter_elements : counter_element
-                            | counter_elements counter_element'''
+        """counter_elements : counter_element
+        | counter_elements counter_element"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
@@ -643,46 +642,47 @@
                 p[0] = p[1] + [p[2]]
 
     def p_counter_element(self, p):
-        '''counter_element : ID '{' counter_statements '}' ';' '''
-        p[0] = {**{'name': p[1]}, **p[3]}
+        """counter_element : ID '{' counter_statements '}' ';'"""
+        p[0] = {**{"name": p[1]}, **p[3]}
 
     def p_counter_statements(self, p):
-        '''counter_statements : counter_statement
-                        | counter_statements counter_statement'''
+        """counter_statements : counter_statement
+        | counter_statements counter_statement"""
         if len(p) == 2:
             p[0] = p[1]
         else:
             p[0] = {**p[1], **p[2]}
 
     def p_counter_statement(self, p):
-        '''counter_statement : SEVERITY ID ';'
-                             | UNITS STRING_LITERAL ';'
-                             | DESCRIPTION STRING_LITERAL ';'
-                             | TYPE ID ';' '''
+        """counter_statement : SEVERITY ID ';'
+        | UNITS STRING_LITERAL ';'
+        | DESCRIPTION STRING_LITERAL ';'
+        | TYPE ID ';'"""
         p[0] = {p[1]: p[2]}
 
     def p_service(self, p):
-        '''service : SERVICE '{' service_statements '}' ';' '''
+        """service : SERVICE '{' service_statements '}' ';'"""
         p[0] = p[3]
 
     def p_service_statements(self, p):
-        '''service_statements : service_statement
-                        | service_statements service_statement'''
+        """service_statements : service_statement
+        | service_statements service_statement"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_service_statement(self, p):
-        '''service_statement : RPC ID RETURNS NULL ';'
-                             | RPC ID RETURNS ID ';'
-                             | RPC ID RETURNS STREAM ID ';'
-                             | RPC ID RETURNS ID EVENTS event_list ';' '''
+        """service_statement : RPC ID RETURNS NULL ';'
+        | RPC ID RETURNS ID ';'
+        | RPC ID RETURNS STREAM ID ';'
+        | RPC ID RETURNS ID EVENTS event_list ';'"""
         if p[2] == p[4]:
             # Verify that caller and reply differ
             self._parse_error(
-                'Reply ID ({}) should not be equal to Caller ID'.format(p[2]),
-                self._token_coord(p, 1))
+                "Reply ID ({}) should not be equal to Caller ID".format(p[2]),
+                self._token_coord(p, 1),
+            )
         if len(p) == 8:
             p[0] = Service(p[2], p[4], p[6])
         elif len(p) == 7:
@@ -691,280 +691,283 @@
             p[0] = Service(p[2], p[4])
 
     def p_service_statement2(self, p):
-        '''service_statement : RPC ID RETURNS ID STREAM ID ';' '''
+        """service_statement : RPC ID RETURNS ID STREAM ID ';'"""
         p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
 
     def p_event_list(self, p):
-        '''event_list : events
-                      | event_list events '''
+        """event_list : events
+        | event_list events"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_event(self, p):
-        '''events : ID
-                  | ID ',' '''
+        """events : ID
+        | ID ','"""
         p[0] = p[1]
 
     def p_enum(self, p):
-        '''enum : ENUM ID '{' enum_statements '}' ';' '''
+        """enum : ENUM ID '{' enum_statements '}' ';'"""
         p[0] = Enum(p[2], p[4])
 
     def p_enum_type(self, p):
-        ''' enum : ENUM ID ':' enum_size '{' enum_statements '}' ';' '''
+        """enum : ENUM ID ':' enum_size '{' enum_statements '}' ';'"""
         if len(p) == 9:
             p[0] = Enum(p[2], p[6], enumtype=p[4])
         else:
             p[0] = Enum(p[2], p[4])
 
     def p_enumflag(self, p):
-        '''enumflag : ENUMFLAG ID '{' enum_statements '}' ';' '''
+        """enumflag : ENUMFLAG ID '{' enum_statements '}' ';'"""
         p[0] = EnumFlag(p[2], p[4])
 
     def p_enumflag_type(self, p):
-        ''' enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';' '''  # noqa : E502
+        """enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';'"""  # noqa : E502
         if len(p) == 9:
             p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
         else:
             p[0] = EnumFlag(p[2], p[4])
 
     def p_enum_size(self, p):
-        ''' enum_size : U8
-                      | U16
-                      | U32
-                      | I8
-                      | I16
-                      | I32 '''
+        """enum_size : U8
+        | U16
+        | U32
+        | I8
+        | I16
+        | I32"""
         p[0] = p[1]
 
     def p_enumflag_size(self, p):
-        ''' enumflag_size : U8
-                          | U16
-                          | U32 '''
+        """enumflag_size : U8
+        | U16
+        | U32"""
         p[0] = p[1]
 
     def p_define(self, p):
-        '''define : DEFINE ID '{' block_statements_opt '}' ';' '''
+        """define : DEFINE ID '{' block_statements_opt '}' ';'"""
         self.fields = []
         p[0] = Define(p[2], [], p[4])
 
     def p_define_flist(self, p):
-        '''define : flist DEFINE ID '{' block_statements_opt '}' ';' '''
+        """define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
         # Legacy typedef
-        if 'typeonly' in p[1]:
-            self._parse_error('legacy typedef. use typedef: {} {}[{}];'
-                              .format(p[1], p[2], p[4]),
-                              self._token_coord(p, 1))
+        if "typeonly" in p[1]:
+            self._parse_error(
+                "legacy typedef. use typedef: {} {}[{}];".format(p[1], p[2], p[4]),
+                self._token_coord(p, 1),
+            )
         else:
             p[0] = Define(p[3], p[1], p[5])
 
     def p_flist(self, p):
-        '''flist : flag
-                 | flist flag'''
+        """flist : flag
+        | flist flag"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_flag(self, p):
-        '''flag : MANUAL_PRINT
-                | MANUAL_ENDIAN
-                | DONT_TRACE
-                | TYPEONLY
-                | AUTOENDIAN
-                | AUTOREPLY'''
+        """flag : MANUAL_PRINT
+        | MANUAL_ENDIAN
+        | DONT_TRACE
+        | TYPEONLY
+        | AUTOENDIAN
+        | AUTOREPLY"""
         if len(p) == 1:
             return
         p[0] = p[1]
 
     def p_typedef(self, p):
-        '''typedef : TYPEDEF ID '{' block_statements_opt '}' ';' '''
+        """typedef : TYPEDEF ID '{' block_statements_opt '}' ';'"""
         p[0] = Typedef(p[2], [], p[4])
 
     def p_typedef_flist(self, p):
-        '''typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';' '''
+        """typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';'"""
         p[0] = Typedef(p[3], p[1], p[5])
 
     def p_typedef_alias(self, p):
-        '''typedef : TYPEDEF declaration '''
+        """typedef : TYPEDEF declaration"""
         p[0] = Using(p[2].fieldname, [], p[2])
 
     def p_typedef_alias_flist(self, p):
-        '''typedef : flist TYPEDEF declaration '''
+        """typedef : flist TYPEDEF declaration"""
         p[0] = Using(p[3].fieldname, p[1], p[3])
 
     def p_block_statements_opt(self, p):
-        '''block_statements_opt : block_statements '''
+        """block_statements_opt : block_statements"""
         p[0] = p[1]
 
     def p_block_statements(self, p):
-        '''block_statements : block_statement
-                            | block_statements block_statement'''
+        """block_statements : block_statement
+        | block_statements block_statement"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_block_statement(self, p):
-        '''block_statement : declaration
-                           | option '''
+        """block_statement : declaration
+        | option"""
         p[0] = p[1]
 
     def p_enum_statements(self, p):
-        '''enum_statements : enum_statement
-                           | enum_statements enum_statement'''
+        """enum_statements : enum_statement
+        | enum_statements enum_statement"""
         if len(p) == 2:
             p[0] = [p[1]]
         else:
             p[0] = p[1] + [p[2]]
 
     def p_enum_statement(self, p):
-        '''enum_statement : ID '=' NUM ','
-                          | ID ','
-                          | ID '[' field_options ']' ','
-                          | ID '=' NUM '[' field_options ']' ',' '''
+        """enum_statement : ID '=' NUM ','
+        | ID ','
+        | ID '[' field_options ']' ','
+        | ID '=' NUM '[' field_options ']' ','"""
         if len(p) == 3:
-            p[0] = {'id': p[1]}
+            p[0] = {"id": p[1]}
         elif len(p) == 5:
-            p[0] = {'id': p[1], 'value': p[3]}
+            p[0] = {"id": p[1], "value": p[3]}
         elif len(p) == 6:
-            p[0] = {'id': p[1], 'option': p[3]}
+            p[0] = {"id": p[1], "option": p[3]}
         elif len(p) == 8:
-            p[0] = {'id': p[1], 'value': p[3], 'option': p[5]}
+            p[0] = {"id": p[1], "value": p[3], "option": p[5]}
         else:
-            self._parse_error('ERROR', self._token_coord(p, 1))
+            self._parse_error("ERROR", self._token_coord(p, 1))
 
     def p_field_options(self, p):
-        '''field_options : field_option
-                           | field_options field_option'''
+        """field_options : field_option
+        | field_options field_option"""
         if len(p) == 2:
             p[0] = p[1]
         else:
             p[0] = {**p[1], **p[2]}
 
     def p_field_option(self, p):
-        '''field_option : ID
-                        | ID '=' assignee ','
-                        | ID '=' assignee
+        """field_option : ID
+        | ID '=' assignee ','
+        | ID '=' assignee
 
-        '''
+        """
         if len(p) == 2:
             p[0] = {p[1]: None}
         else:
             p[0] = {p[1]: p[3]}
 
     def p_variable_name(self, p):
-        '''variable_name : ID
-                         | TYPE
-                         | SEVERITY
-                         | DESCRIPTION
-                         | COUNTERS
-                         | PATHS
-        '''
+        """variable_name : ID
+        | TYPE
+        | SEVERITY
+        | DESCRIPTION
+        | COUNTERS
+        | PATHS
+        """
         p[0] = p[1]
 
     def p_declaration(self, p):
-        '''declaration : type_specifier variable_name ';'
-                       | type_specifier variable_name '[' field_options ']' ';'
-        '''
+        """declaration : type_specifier variable_name ';'
+        | type_specifier variable_name '[' field_options ']' ';'
+        """
         if len(p) == 7:
             p[0] = Field(p[1], p[2], p[4])
         elif len(p) == 4:
             p[0] = Field(p[1], p[2])
         else:
-            self._parse_error('ERROR', self._token_coord(p, 1))
+            self._parse_error("ERROR", self._token_coord(p, 1))
         self.fields.append(p[2])
 
     def p_declaration_array_vla(self, p):
-        '''declaration : type_specifier variable_name '[' ']' ';' '''
+        """declaration : type_specifier variable_name '[' ']' ';'"""
         p[0] = Array(p[1], p[2], 0, modern_vla=True)
 
     def p_declaration_array(self, p):
-        '''declaration : type_specifier variable_name '[' NUM ']' ';'
-                       | type_specifier variable_name '[' ID ']' ';' '''
+        """declaration : type_specifier variable_name '[' NUM ']' ';'
+        | type_specifier variable_name '[' ID ']' ';'"""
 
         if len(p) != 7:
             return self._parse_error(
-                'array: %s' % p.value,
-                self._coord(lineno=p.lineno))
+                "array: %s" % p.value, self._coord(lineno=p.lineno)
+            )
 
         # Make this error later
         if type(p[4]) is int and p[4] == 0:
             # XXX: Line number is wrong
-            self._parse_warning('Old Style VLA: {} {}[{}];'
-                                .format(p[1], p[2], p[4]),
-                                self._token_coord(p, 1))
+            self._parse_warning(
+                "Old Style VLA: {} {}[{}];".format(p[1], p[2], p[4]),
+                self._token_coord(p, 1),
+            )
 
         if type(p[4]) is str and p[4] not in self.fields:
             # Verify that length field exists
-            self._parse_error('Missing length field: {} {}[{}];'
-                              .format(p[1], p[2], p[4]),
-                              self._token_coord(p, 1))
+            self._parse_error(
+                "Missing length field: {} {}[{}];".format(p[1], p[2], p[4]),
+                self._token_coord(p, 1),
+            )
         p[0] = Array(p[1], p[2], p[4])
 
     def p_option(self, p):
-        '''option : OPTION ID '=' assignee ';'
-                  | OPTION ID ';' '''
+        """option : OPTION ID '=' assignee ';'
+        | OPTION ID ';'"""
         if len(p) == 4:
             p[0] = Option(p[2])
         else:
             p[0] = Option(p[2], p[4])
 
     def p_assignee(self, p):
-        '''assignee : NUM
-                    | TRUE
-                    | FALSE
-                    | STRING_LITERAL '''
+        """assignee : NUM
+        | TRUE
+        | FALSE
+        | STRING_LITERAL"""
         p[0] = p[1]
 
     def p_type_specifier(self, p):
-        '''type_specifier : U8
-                          | U16
-                          | U32
-                          | U64
-                          | I8
-                          | I16
-                          | I32
-                          | I64
-                          | F64
-                          | BOOL
-                          | STRING'''
+        """type_specifier : U8
+        | U16
+        | U32
+        | U64
+        | I8
+        | I16
+        | I32
+        | I64
+        | F64
+        | BOOL
+        | STRING"""
         p[0] = p[1]
 
     # Do a second pass later to verify that user defined types are defined
     def p_typedef_specifier(self, p):
-        '''type_specifier : ID '''
+        """type_specifier : ID"""
         if p[1] not in global_types:
-            self._parse_error('Undefined type: {}'.format(p[1]),
-                              self._token_coord(p, 1))
+            self._parse_error(
+                "Undefined type: {}".format(p[1]), self._token_coord(p, 1)
+            )
         p[0] = p[1]
 
     def p_union(self, p):
-        '''union : UNION ID '{' block_statements_opt '}' ';' '''
+        """union : UNION ID '{' block_statements_opt '}' ';'"""
         p[0] = Union(p[2], [], p[4])
 
     def p_union_flist(self, p):
-        '''union : flist UNION ID '{' block_statements_opt '}' ';' '''
+        """union : flist UNION ID '{' block_statements_opt '}' ';'"""
         p[0] = Union(p[3], p[1], p[5])
 
     # Error rule for syntax errors
     def p_error(self, p):
         if p:
-            self._parse_error(
-                'before: %s' % p.value,
-                self._coord(lineno=p.lineno))
+            self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
         else:
-            self._parse_error('At end of input', self.filename)
+            self._parse_error("At end of input", self.filename)
 
 
-class VPPAPI():
-
-    def __init__(self, debug=False, filename='', logger=None, revision=None):
+class VPPAPI:
+    def __init__(self, debug=False, filename="", logger=None, revision=None):
         self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
-        self.parser = yacc.yacc(module=VPPAPIParser(filename, logger,
-                                                    revision=revision),
-                                write_tables=False, debug=debug)
+        self.parser = yacc.yacc(
+            module=VPPAPIParser(filename, logger, revision=revision),
+            write_tables=False,
+            debug=debug,
+        )
         self.logger = logger
         self.revision = revision
         self.filename = filename
@@ -979,38 +982,40 @@
 
     def parse_filename(self, filename, debug=0):
         if self.revision:
-            git_show = 'git show {}:{}'.format(self.revision, filename)
-            proc = Popen(git_show.split(), stdout=PIPE, encoding='utf-8')
+            git_show = "git show {}:{}".format(self.revision, filename)
+            proc = Popen(git_show.split(), stdout=PIPE, encoding="utf-8")
             try:
                 data, errs = proc.communicate()
                 if proc.returncode != 0:
-                    print('File not found: {}:{}'
-                          .format(self.revision, filename), file=sys.stderr)
+                    print(
+                        "File not found: {}:{}".format(self.revision, filename),
+                        file=sys.stderr,
+                    )
                     sys.exit(2)
                 return self.parse_string(data, debug=debug)
             except Exception:
                 sys.exit(3)
         else:
             try:
-                with open(filename, encoding='utf-8') as fd:
+                with open(filename, encoding="utf-8") as fd:
                     return self.parse_fd(fd, None)
             except FileNotFoundError:
-                print('File not found: {}'.format(filename), file=sys.stderr)
+                print("File not found: {}".format(filename), file=sys.stderr)
                 sys.exit(2)
 
     def process(self, objs):
         s = {}
-        s['Option'] = {}
-        s['Define'] = []
-        s['Service'] = []
-        s['types'] = []
-        s['Import'] = []
-        s['Counters'] = []
-        s['Paths'] = []
+        s["Option"] = {}
+        s["Define"] = []
+        s["Service"] = []
+        s["types"] = []
+        s["Import"] = []
+        s["Counters"] = []
+        s["Paths"] = []
         crc = 0
         for o in objs:
             try:
-                crc = binascii.crc32(o.crc, crc) & 0xffffffff
+                crc = binascii.crc32(o.crc, crc) & 0xFFFFFFFF
             except AttributeError:
                 pass
 
@@ -1021,82 +1026,84 @@
             else:
                 o.process(s)
 
-        msgs = {d.name: d for d in s['Define']}
-        svcs = {s.caller: s for s in s['Service']}
-        replies = {s.reply: s for s in s['Service']}
+        msgs = {d.name: d for d in s["Define"]}
+        svcs = {s.caller: s for s in s["Service"]}
+        replies = {s.reply: s for s in s["Service"]}
         seen_services = {}
 
-        s['file_crc'] = crc
+        s["file_crc"] = crc
 
         for service in svcs:
             if service not in msgs:
                 raise ValueError(
-                    'Service definition refers to unknown message'
-                    ' definition: {}'.format(service))
-            if svcs[service].reply != 'null' and \
-               svcs[service].reply not in msgs:
-                raise ValueError('Service definition refers to unknown message'
-                                 ' definition in reply: {}'
-                                 .format(svcs[service].reply))
+                    "Service definition refers to unknown message"
+                    " definition: {}".format(service)
+                )
+            if svcs[service].reply != "null" and svcs[service].reply not in msgs:
+                raise ValueError(
+                    "Service definition refers to unknown message"
+                    " definition in reply: {}".format(svcs[service].reply)
+                )
             if service in replies:
-                raise ValueError('Service definition refers to message'
-                                 ' marked as reply: {}'.format(service))
+                raise ValueError(
+                    "Service definition refers to message"
+                    " marked as reply: {}".format(service)
+                )
             for event in svcs[service].events:
                 if event not in msgs:
-                    raise ValueError('Service definition refers to unknown '
-                                     'event: {} in message: {}'
-                                     .format(event, service))
+                    raise ValueError(
+                        "Service definition refers to unknown "
+                        "event: {} in message: {}".format(event, service)
+                    )
                 seen_services[event] = True
 
         # Create services implicitly
         for d in msgs:
             if d in seen_services:
                 continue
-            if d.endswith('_reply'):
+            if d.endswith("_reply"):
                 if d[:-6] in svcs:
                     continue
                 if d[:-6] not in msgs:
-                    raise ValueError('{} missing calling message'
-                                     .format(d))
+                    raise ValueError("{} missing calling message".format(d))
                 continue
-            if d.endswith('_dump'):
+            if d.endswith("_dump"):
                 if d in svcs:
                     continue
-                if d[:-5]+'_details' in msgs:
-                    s['Service'].append(Service(d, d[:-5]+'_details',
-                                                stream=True))
+                if d[:-5] + "_details" in msgs:
+                    s["Service"].append(Service(d, d[:-5] + "_details", stream=True))
                 else:
-                    raise ValueError('{} missing details message'
-                                     .format(d))
+                    raise ValueError("{} missing details message".format(d))
                 continue
 
-            if d.endswith('_details'):
-                if d[:-8]+'_get' in msgs:
-                    if d[:-8]+'_get' in svcs:
+            if d.endswith("_details"):
+                if d[:-8] + "_get" in msgs:
+                    if d[:-8] + "_get" in svcs:
                         continue
-                    raise ValueError('{} should be in a stream service'
-                                     .format(d[:-8]+'_get'))
-                if d[:-8]+'_dump' in msgs:
+                    raise ValueError(
+                        "{} should be in a stream service".format(d[:-8] + "_get")
+                    )
+                if d[:-8] + "_dump" in msgs:
                     continue
-                raise ValueError('{} missing dump or get message'
-                                 .format(d))
+                raise ValueError("{} missing dump or get message".format(d))
 
             if d in svcs:
                 continue
-            if d+'_reply' in msgs:
-                s['Service'].append(Service(d, d+'_reply'))
+            if d + "_reply" in msgs:
+                s["Service"].append(Service(d, d + "_reply"))
             else:
                 raise ValueError(
-                    '{} missing reply message ({}) or service definition'
-                    .format(d, d+'_reply'))
+                    "{} missing reply message ({}) or service definition".format(
+                        d, d + "_reply"
+                    )
+                )
 
         return s
 
     def process_imports(self, objs, in_import, result):  # -> List
         for o in objs:
             # Only allow the following object types from imported file
-            if in_import and not isinstance(o, (Enum, Import, Typedef,
-                                                Union, Using)):
+            if in_import and not isinstance(o, (Enum, Import, Typedef, Union, Using)):
                 continue
             if isinstance(o, Import):
                 result.append(o)
@@ -1109,7 +1116,7 @@
 # Add message ids to each message.
 def add_msg_id(s):
     for o in s:
-        o.block.insert(0, Field('u16', '_vl_msg_id'))
+        o.block.insert(0, Field("u16", "_vl_msg_id"))
     return s
 
 
@@ -1129,11 +1136,11 @@
 def foldup_blocks(block, crc):
     for b in block:
         # Look up CRC in user defined types
-        if b.fieldtype.startswith('vl_api_'):
+        if b.fieldtype.startswith("vl_api_"):
             # Recursively
             t = global_types[b.fieldtype]
             try:
-                crc = binascii.crc32(t.crc, crc) & 0xffffffff
+                crc = binascii.crc32(t.crc, crc) & 0xFFFFFFFF
                 crc = foldup_blocks(t.block, crc)
             except AttributeError:
                 pass
@@ -1142,34 +1149,43 @@
 
 def foldup_crcs(s):
     for f in s:
-        f.crc = foldup_blocks(f.block,
-                              binascii.crc32(f.crc) & 0xffffffff)
+        f.crc = foldup_blocks(f.block, binascii.crc32(f.crc) & 0xFFFFFFFF)
 
 
 #
 # Main
 #
 def main():
-    if sys.version_info < (3, 5,):
-        log.exception('vppapigen requires a supported version of python. '
-                      'Please use version 3.5 or greater. '
-                      'Using %s', sys.version)
+    if sys.version_info < (
+        3,
+        5,
+    ):
+        log.exception(
+            "vppapigen requires a supported version of python. "
+            "Please use version 3.5 or greater. "
+            "Using %s",
+            sys.version,
+        )
         return 1
 
-    cliparser = argparse.ArgumentParser(description='VPP API generator')
-    cliparser.add_argument('--pluginpath', default="")
-    cliparser.add_argument('--includedir', action='append')
-    cliparser.add_argument('--outputdir', action='store')
-    cliparser.add_argument('--input')
-    cliparser.add_argument('--output', nargs='?',
-                           type=argparse.FileType('w', encoding='UTF-8'),
-                           default=sys.stdout)
+    cliparser = argparse.ArgumentParser(description="VPP API generator")
+    cliparser.add_argument("--pluginpath", default="")
+    cliparser.add_argument("--includedir", action="append")
+    cliparser.add_argument("--outputdir", action="store")
+    cliparser.add_argument("--input")
+    cliparser.add_argument(
+        "--output",
+        nargs="?",
+        type=argparse.FileType("w", encoding="UTF-8"),
+        default=sys.stdout,
+    )
 
-    cliparser.add_argument('output_module', nargs='?', default='C')
-    cliparser.add_argument('--debug', action='store_true')
-    cliparser.add_argument('--show-name', nargs=1)
-    cliparser.add_argument('--git-revision',
-                           help="Git revision to use for opening files")
+    cliparser.add_argument("output_module", nargs="?", default="C")
+    cliparser.add_argument("--debug", action="store_true")
+    cliparser.add_argument("--show-name", nargs=1)
+    cliparser.add_argument(
+        "--git-revision", help="Git revision to use for opening files"
+    )
     args = cliparser.parse_args()
 
     dirlist_add(args.includedir)
@@ -1182,7 +1198,7 @@
     elif args.input:
         filename = args.input
     else:
-        filename = ''
+        filename = ""
 
     if args.debug:
         logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
@@ -1195,36 +1211,34 @@
     from importlib.machinery import SourceFileLoader
 
     # Default path
-    pluginpath = ''
+    pluginpath = ""
     if not args.pluginpath:
         cand = []
         cand.append(os.path.dirname(os.path.realpath(__file__)))
-        cand.append(os.path.dirname(os.path.realpath(__file__)) +
-                    '/../share/vpp/')
+        cand.append(os.path.dirname(os.path.realpath(__file__)) + "/../share/vpp/")
         for c in cand:
-            c += '/'
-            if os.path.isfile('{}vppapigen_{}.py'
-                              .format(c, args.output_module.lower())):
+            c += "/"
+            if os.path.isfile(
+                "{}vppapigen_{}.py".format(c, args.output_module.lower())
+            ):
                 pluginpath = c
                 break
     else:
-        pluginpath = args.pluginpath + '/'
-    if pluginpath == '':
-        log.exception('Output plugin not found')
+        pluginpath = args.pluginpath + "/"
+    if pluginpath == "":
+        log.exception("Output plugin not found")
         return 1
-    module_path = '{}vppapigen_{}.py'.format(pluginpath,
-                                             args.output_module.lower())
+    module_path = "{}vppapigen_{}.py".format(pluginpath, args.output_module.lower())
 
     try:
-        plugin = SourceFileLoader(args.output_module,
-                                  module_path).load_module()
+        plugin = SourceFileLoader(args.output_module, module_path).load_module()
     except Exception as err:
-        log.exception('Error importing output plugin: %s, %s',
-                      module_path, err)
+        log.exception("Error importing output plugin: %s, %s", module_path, err)
         return 1
 
-    parser = VPPAPI(debug=args.debug, filename=filename, logger=log,
-                    revision=args.git_revision)
+    parser = VPPAPI(
+        debug=args.debug, filename=filename, logger=log, revision=args.git_revision
+    )
 
     try:
         if not args.input:
@@ -1232,7 +1246,7 @@
         else:
             parsed_objects = parser.parse_filename(args.input, log)
     except ParseError as e:
-        print('Parse error: ', e, file=sys.stderr)
+        print("Parse error: ", e, file=sys.stderr)
         sys.exit(1)
 
     # Build a list of objects. Hash of lists.
@@ -1250,32 +1264,33 @@
     else:
         s = parser.process(parsed_objects)
         imports = parser.process_imports(parsed_objects, False, result)
-        s['imported'] = parser.process(imports)
+        s["imported"] = parser.process(imports)
 
     # Add msg_id field
-    s['Define'] = add_msg_id(s['Define'])
+    s["Define"] = add_msg_id(s["Define"])
 
     # Fold up CRCs
-    foldup_crcs(s['Define'])
+    foldup_crcs(s["Define"])
 
     #
     # Debug
     if args.debug:
         import pprint
+
         pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
-        for t in s['Define']:
+        for t in s["Define"]:
             pp.pprint([t.name, t.flags, t.block])
-        for t in s['types']:
+        for t in s["types"]:
             pp.pprint([t.name, t.block])
 
     result = plugin.run(args, filename, s)
     if result:
         print(result, file=args.output)
     else:
-        log.exception('Running plugin failed: %s %s', filename, result)
+        log.exception("Running plugin failed: %s %s", filename, result)
         return 1
     return 0
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())