diff --git a/scripts/dts/gen_defines.py b/scripts/dts/gen_defines.py
index 47b0412ca91735..eb9fc5e8ce02a1 100755
--- a/scripts/dts/gen_defines.py
+++ b/scripts/dts/gen_defines.py
@@ -411,8 +411,8 @@ def map_arm_gic_irq_type(irq, irq_num):
             idx_vals.append((idx_macro, cell_value))
             idx_vals.append((idx_macro + "_EXISTS", 1))
             if irq.name:
-                name_macro = \
-                    f"{path_id}_IRQ_NAME_{str2ident(irq.name)}_VAL_{name}"
+                name_macro = (
+                    f"{path_id}_IRQ_NAME_{str2ident(irq.name)}_VAL_{name}")
                 name_vals.append((name_macro, f"DT_{idx_macro}"))
                 name_vals.append((name_macro + "_EXISTS", 1))
 
@@ -619,28 +619,27 @@ def write_vanilla_props(node: edtlib.Node) -> None:
         plen = prop_len(prop)
         if plen is not None:
             # DT_N_<node-id>_P_<prop-id>_FOREACH_PROP_ELEM
-            macro2val[f"{macro}_FOREACH_PROP_ELEM(fn)"] = \
-                ' \\\n\t'.join(
-                    f'fn(DT_{node.z_path_id}, {prop_id}, {i})'
-                    for i in range(plen))
+            macro2val[f"{macro}_FOREACH_PROP_ELEM(fn)"] = (
+                ' \\\n\t'.join(f'fn(DT_{node.z_path_id}, {prop_id}, {i})'
+                               for i in range(plen)))
 
             # DT_N_<node-id>_P_<prop-id>_FOREACH_PROP_ELEM_SEP
-            macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP(fn, sep)"] = \
+            macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP(fn, sep)"] = (
                 ' DT_DEBRACKET_INTERNAL sep \\\n\t'.join(
                     f'fn(DT_{node.z_path_id}, {prop_id}, {i})'
-                    for i in range(plen))
+                    for i in range(plen)))
 
             # DT_N_<node-id>_P_<prop-id>_FOREACH_PROP_ELEM_VARGS
-            macro2val[f"{macro}_FOREACH_PROP_ELEM_VARGS(fn, ...)"] = \
+            macro2val[f"{macro}_FOREACH_PROP_ELEM_VARGS(fn, ...)"] = (
                 ' \\\n\t'.join(
                     f'fn(DT_{node.z_path_id}, {prop_id}, {i}, __VA_ARGS__)'
-                    for i in range(plen))
+                    for i in range(plen)))
 
             # DT_N_<node-id>_P_<prop-id>_FOREACH_PROP_ELEM_SEP_VARGS
-            macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP_VARGS(fn, sep, ...)"] = \
+            macro2val[f"{macro}_FOREACH_PROP_ELEM_SEP_VARGS(fn, sep, ...)"] = (
                 ' DT_DEBRACKET_INTERNAL sep \\\n\t'.join(
                     f'fn(DT_{node.z_path_id}, {prop_id}, {i}, __VA_ARGS__)'
-                    for i in range(plen))
+                    for i in range(plen)))
 
             # DT_N_<node-id>_P_<prop-id>_LEN
             macro2val[f"{macro}_LEN"] = plen
@@ -715,9 +714,9 @@ def fmt_dep_list(dep_list):
         if dep_list:
             # Sort the list by dependency ordinal for predictability.
             sorted_list = sorted(dep_list, key=lambda node: node.dep_ordinal)
-            return "\\\n\t" + \
-                " \\\n\t".join(f"{n.dep_ordinal}, /* {n.path} */"
-                               for n in sorted_list)
+            return ("\\\n\t" + " \\\n\t"
+                    .join(f"{n.dep_ordinal}, /* {n.path} */"
+                          for n in sorted_list))
         else:
             return "/* nothing */"
 
@@ -867,8 +866,8 @@ def controller_and_data_macros(entry: edtlib.ControllerAndData, i: int, macro: s
     # DT_N_<node-id>_P_<prop-id>_NAME_<NAME>_VAL_<VAL>
     for cell, val in data.items():
         cell_ident = str2ident(cell)
-        ret[f"{macro}_NAME_{name}_VAL_{cell_ident}"] = \
-            f"DT_{macro}_IDX_{i}_VAL_{cell_ident}"
+        ret[f"{macro}_NAME_{name}_VAL_{cell_ident}"] = (
+            f"DT_{macro}_IDX_{i}_VAL_{cell_ident}")
         ret[f"{macro}_NAME_{name}_VAL_{cell_ident}_EXISTS"] = 1
 
     return ret
@@ -919,24 +918,24 @@ def write_global_macros(edt: edtlib.EDT):
 
         # Helpers for non-INST for-each macros that take node
        # identifiers as arguments.
-        for_each_macros[f"DT_FOREACH_OKAY_{ident}(fn)"] = \
+        for_each_macros[f"DT_FOREACH_OKAY_{ident}(fn)"] = (
             " ".join(f"fn(DT_{node.z_path_id})"
-                     for node in okay_nodes)
-        for_each_macros[f"DT_FOREACH_OKAY_VARGS_{ident}(fn, ...)"] = \
+                     for node in okay_nodes))
+        for_each_macros[f"DT_FOREACH_OKAY_VARGS_{ident}(fn, ...)"] = (
             " ".join(f"fn(DT_{node.z_path_id}, __VA_ARGS__)"
-                     for node in okay_nodes)
+                     for node in okay_nodes))
 
         # Helpers for INST versions of for-each macros, which take
        # instance numbers. We emit separate helpers for these because
        # avoiding an intermediate node_id --> instance number
        # conversion in the preprocessor helps to keep the macro
        # expansions simpler. That hopefully eases debugging.
-        for_each_macros[f"DT_FOREACH_OKAY_INST_{ident}(fn)"] = \
+        for_each_macros[f"DT_FOREACH_OKAY_INST_{ident}(fn)"] = (
             " ".join(f"fn({edt.compat2nodes[compat].index(node)})"
-                     for node in okay_nodes)
-        for_each_macros[f"DT_FOREACH_OKAY_INST_VARGS_{ident}(fn, ...)"] = \
+                     for node in okay_nodes))
+        for_each_macros[f"DT_FOREACH_OKAY_INST_VARGS_{ident}(fn, ...)"] = (
             " ".join(f"fn({edt.compat2nodes[compat].index(node)}, __VA_ARGS__)"
-                     for node in okay_nodes)
+                     for node in okay_nodes))
 
     for compat, nodes in edt.compat2nodes.items():
         for node in nodes:
diff --git a/scripts/dts/python-devicetree/src/devicetree/dtlib.py b/scripts/dts/python-devicetree/src/devicetree/dtlib.py
index 2a1c298f120587..000c80148874ee 100644
--- a/scripts/dts/python-devicetree/src/devicetree/dtlib.py
+++ b/scripts/dts/python-devicetree/src/devicetree/dtlib.py
@@ -20,8 +20,9 @@
 import string
 import sys
 import textwrap
-from typing import Any, Dict, Iterable, List, \
-    NamedTuple, NoReturn, Optional, Set, Tuple, TYPE_CHECKING, Union
+from typing import (Any, Dict, Iterable, List,
+                    NamedTuple, NoReturn, Optional,
+                    Set, Tuple, TYPE_CHECKING, Union)
 
 # NOTE: tests/test_dtlib.py is the test suite for this library.
 
@@ -353,8 +354,8 @@ def type(self) -> Type:
         if types == [_MarkerType.PATH]:
             return Type.PATH
 
-        if types == [_MarkerType.UINT32, _MarkerType.PHANDLE] and \
-           len(self.value) == 4:
+        if (types == [_MarkerType.UINT32, _MarkerType.PHANDLE]
+            and len(self.value) == 4):
             return Type.PHANDLE
 
         if set(types) == {_MarkerType.UINT32, _MarkerType.PHANDLE}:
@@ -608,9 +609,10 @@ def __str__(self):
 
                 pos += elm_size
 
-        if pos != 0 and \
-           (not next_marker or
-            next_marker[1] not in (_MarkerType.PHANDLE, _MarkerType.LABEL)):
+        if (pos != 0
+            and (not next_marker
+                 or next_marker[1]
+                 not in (_MarkerType.PHANDLE, _MarkerType.LABEL))):
             s += _N_BYTES_TO_END_STR[elm_size]
 
         if pos != len(self.value):
@@ -620,8 +622,8 @@ def __str__(self):
 
     def __repr__(self):
-        return f"<Property '{self.name}' at '{self.node.path}' in " \
-            f"'{self.node.dt.filename}'>"
+        return (f"<Property '{self.name}' at '{self.node.path}' in "
+                f"'{self.node.dt.filename}'>")
 
     #
     # Internal functions
@@ -914,8 +916,8 @@ def __repr__(self):
         the DT instance is evaluated.
         """
         if self.filename:
-            return f"DT(filename='{self.filename}', " \
-                f"include_path={self._include_path})"
+            return (f"DT(filename='{self.filename}', "
+                    f"include_path={self._include_path})")
         return super().__repr__()
 
     def __deepcopy__(self, memo):
@@ -1634,8 +1636,8 @@ def _next_token(self):
 
         # State handling
 
-        if tok_id in (_T.DEL_PROP, _T.DEL_NODE, _T.OMIT_IF_NO_REF) or \
-           tok_val in ("{", ";"):
+        if (tok_id in (_T.DEL_PROP, _T.DEL_NODE, _T.OMIT_IF_NO_REF)
+                or tok_val in ("{", ";")):
 
             self._lexer_state = _EXPECT_PROPNODENAME
 
@@ -1709,8 +1711,8 @@ def _enter_file(self, filename):
 
     def _leave_file(self):
         # Leaves an /include/d file, returning to the file that /include/d it
-        self.filename, self._lineno, self._file_contents, self._tok_end_i = \
-            self._filestack.pop()
+        self.filename, self._lineno, self._file_contents, self._tok_end_i = (
+            self._filestack.pop())
 
     def _next_ref2node(self):
         # Checks that the next token is a label/path reference and returns the
@@ -2067,10 +2069,10 @@ def _decode_and_escape(b):
     # 'backslashreplace' bytes.translate() can't map to more than a single
     # byte, but str.translate() can map to more than one character, so it's
    # nice here. There's probably a nicer way to do this.
-    return b.decode("utf-8", "surrogateescape") \
-            .translate(_escape_table) \
-            .encode("utf-8", "surrogateescape") \
-            .decode("utf-8", "backslashreplace")
+    return (b.decode("utf-8", "surrogateescape")
+            .translate(_escape_table)
+            .encode("utf-8", "surrogateescape")
+            .decode("utf-8", "backslashreplace"))
 
 def _root_and_path_to_node(cur, path, fullpath):
     # Returns the node pointed at by 'path', relative to the Node 'cur'. For
diff --git a/scripts/dts/python-devicetree/src/devicetree/edtlib.py b/scripts/dts/python-devicetree/src/devicetree/edtlib.py
index 30c936b373360f..854b2698a2dc36 100644
--- a/scripts/dts/python-devicetree/src/devicetree/edtlib.py
+++ b/scripts/dts/python-devicetree/src/devicetree/edtlib.py
@@ -70,8 +70,8 @@
 from collections import defaultdict
 from copy import deepcopy
 from dataclasses import dataclass
-from typing import Any, Callable, Dict, Iterable, List, NoReturn, \
-    Optional, Set, TYPE_CHECKING, Tuple, Union
+from typing import (Any, Callable, Dict, Iterable, List, NoReturn,
+                    Optional, Set, TYPE_CHECKING, Tuple, Union)
 import logging
 import os
 import re
@@ -401,9 +401,9 @@ def _check(self, require_compatible: bool, require_description: bool):
 
         if "bus" in raw:
             bus = raw["bus"]
-            if not isinstance(bus, str) and \
-               (not isinstance(bus, list) and \
-                not all(isinstance(elem, str) for elem in bus)):
+            if (not isinstance(bus, str) and
+                (not isinstance(bus, list) and
+                 not all(isinstance(elem, str) for elem in bus))):
                 _err(f"malformed 'bus:' value in {self.path}, "
                      "expected string or list of strings")
 
@@ -413,8 +413,8 @@ def _check(self, require_compatible: bool, require_description: bool):
                 # Convert bus into a list
                 self._buses = [bus]
 
-        if "on-bus" in raw and \
-           not isinstance(raw["on-bus"], str):
+        if ("on-bus" in raw
+                and not isinstance(raw["on-bus"], str)):
             _err(f"malformed 'on-bus:' value in {self.path}, "
                  "expected string")
 
@@ -422,8 +422,8 @@ def _check(self, require_compatible: bool, require_description: bool):
 
         for key, val in raw.items():
             if key.endswith("-cells"):
-                if not isinstance(val, list) or \
-                   not all(isinstance(elem, str) for elem in val):
+                if (not isinstance(val, list)
+                        or not all(isinstance(elem, str) for elem in val)):
                     _err(f"malformed '{key}:' in {self.path}, "
                          "expected a list of strings")
 
@@ -460,8 +460,8 @@ def _check_properties(self) -> None:
                 _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")
 
-            if "description" in options and \
-               not isinstance(options["description"], str):
+            if ("description" in options
+                    and not isinstance(options["description"], str)):
                 _err("missing, malformed, or empty 'description' for "
                      f"'{prop_name}' in 'properties' in {self.path}")
 
@@ -579,9 +579,10 @@ def enum_upper_tokenizable(self) -> bool:
         if not self.enum_tokenizable:
             self._enum_upper_tokenizable = False
         else:
-            self._enum_upper_tokenizable = \
-                (len(self._as_tokens) ==
-                 len(set(x.upper() for x in self._as_tokens)))
+            self._enum_upper_tokenizable = (
+                len(self._as_tokens) == len(
+                    set(x.upper() for x in self._as_tokens)
+                ))
         return self._enum_upper_tokenizable
 
     @property
@@ -1585,14 +1586,14 @@ def _prop_val(
 
     def _check_undeclared_props(self) -> None:
         # Checks that all properties are declared in the binding
+        wl = {"compatible", "status", "ranges", "phandle",
+              "interrupt-parent", "interrupts-extended", "device_type"}
 
         for prop_name in self._node.props:
             # Allow a few special properties to not be declared in the binding
-            if prop_name.endswith("-controller") or \
-               prop_name.startswith("#") or \
-               prop_name in {
-                   "compatible", "status", "ranges", "phandle",
-                   "interrupt-parent", "interrupts-extended", "device_type"}:
+            if (prop_name.endswith("-controller")
+                or prop_name.startswith("#")
+                or prop_name in wl):
                 continue
 
             if TYPE_CHECKING:
@@ -1807,9 +1808,9 @@ def _standard_phandle_val_list(
                 continue
 
             controller_node, data = item
-            mapped_controller, mapped_data = \
-                _map_phandle_array_entry(prop.node, controller_node, data,
-                                         specifier_space)
+            mapped_controller, mapped_data = (
+                _map_phandle_array_entry(prop.node, controller_node,
+                                         data, specifier_space))
             controller = self.edt._node2enode[mapped_controller]
 
             # We'll fix up the names below.
@@ -2066,8 +2067,8 @@ def dts_source(self) -> str:
         return f"{self._dt}"
 
     def __repr__(self) -> str:
-        return f"<EDT for '{self.dts_path}'/binding directories " \
-            f"'{self.bindings_dirs}'>"
+        return (f"<EDT for '{self.dts_path}'/binding directories "
+                f"'{self.bindings_dirs}'>")
 
     def __deepcopy__(self, memo) -> 'EDT':
         """
@@ -2493,12 +2494,12 @@ def _check_include_dict(name: Optional[str],
 
     while child_filter is not None:
         child_copy = deepcopy(child_filter)
-        child_allowlist: Optional[List[str]] = \
-            child_copy.pop('property-allowlist', None)
-        child_blocklist: Optional[List[str]] = \
-            child_copy.pop('property-blocklist', None)
-        next_child_filter: Optional[dict] = \
-            child_copy.pop('child-binding', None)
+        child_allowlist: Optional[List[str]] = (
+            child_copy.pop('property-allowlist', None))
+        child_blocklist: Optional[List[str]] = (
+            child_copy.pop('property-blocklist', None))
+        next_child_filter: Optional[dict] = (
+            child_copy.pop('child-binding', None))
 
         if child_copy:
             # We've popped out all the valid keys.
@@ -2595,8 +2596,8 @@ def _merge_props(to_dict: dict,
     # These are used to generate errors for sketchy property overwrites.
 
     for prop in from_dict:
-        if isinstance(to_dict.get(prop), dict) and \
-           isinstance(from_dict[prop], dict):
+        if (isinstance(to_dict.get(prop), dict)
+                and isinstance(from_dict[prop], dict)):
             _merge_props(to_dict[prop], from_dict[prop], prop,
                          binding_path, check_required)
         elif prop not in to_dict:
@@ -2709,8 +2710,8 @@ def ok_default() -> bool:
 
         # If you change this, be sure to update the type annotation for
        # PropertySpec.default.
-        if prop_type == "int" and isinstance(default, int) or \
-           prop_type == "string" and isinstance(default, str):
+        if (prop_type == "int" and isinstance(default, int)
+                or prop_type == "string" and isinstance(default, str)):
             return True
 
         # array, uint8-array, or string-array
@@ -2718,12 +2719,13 @@ def ok_default() -> bool:
         if not isinstance(default, list):
             return False
 
-        if prop_type == "array" and \
-           all(isinstance(val, int) for val in default):
+        if (prop_type == "array"
+                and all(isinstance(val, int) for val in default)):
             return True
 
-        if prop_type == "uint8-array" and \
-           all(isinstance(val, int) and 0 <= val <= 255 for val in default):
+        if (prop_type == "uint8-array"
+                and all(isinstance(val, int)
+                        and 0 <= val <= 255 for val in default)):
             return True
 
         # string-array
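Every hunk above applies the same mechanical transformation: backslash line continuations are replaced with implicit line joining inside parentheses, the style PEP 8 recommends. A minimal standalone sketch of the two equivalent forms follows; make_macro_backslash, make_macro_parens, path_id and name are hypothetical names used only for illustration, not identifiers from the patch.

# Minimal sketch; the identifiers here are hypothetical and not from the patch.
def make_macro_backslash(path_id, name):
    # Old style: explicit backslash line continuation.
    macro = \
        f"{path_id}_IRQ_NAME_{name}"
    return macro

def make_macro_parens(path_id, name):
    # New style: wrapping the right-hand side in parentheses lets the line
    # continue implicitly, so no trailing backslash is needed.
    macro = (
        f"{path_id}_IRQ_NAME_{name}")
    return macro

# Both spellings build the same string.
assert make_macro_backslash("DT_N_S_soc", "TX") == make_macro_parens("DT_N_S_soc", "TX")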