path: root/tools
author    Tom Rini <trini@konsulko.com>  2017-07-11 20:28:46 -0400
committer Tom Rini <trini@konsulko.com>  2017-07-11 20:28:46 -0400
commit    8d3a25685e4aac7070365a2b3c53c2c81b27930f (patch)
tree      7956bf5e00e3490169a7fc41c42a4416da8db51f /tools
parent    d43ef73bf26614af9b01fd57baa1a1fcf24bfade (diff)
parent    8c9eaadaaad888e0cd77512553d0d02d476b4dde (diff)
Merge git://git.denx.de/u-boot-dm
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/binman/binman.py            |   9
-rw-r--r--  tools/dtoc/dtb_platdata.py        | 456
-rwxr-xr-x  tools/dtoc/dtoc.py                | 452
-rw-r--r--  tools/dtoc/dtoc_test.dts          |  12
-rw-r--r--  tools/dtoc/dtoc_test_aliases.dts  |  18
-rw-r--r--  tools/dtoc/dtoc_test_empty.dts    |  12
-rw-r--r--  tools/dtoc/dtoc_test_phandle.dts  |  23
-rw-r--r--  tools/dtoc/dtoc_test_simple.dts   |  48
-rw-r--r--  tools/dtoc/test_dtoc.py           | 271
-rwxr-xr-x  tools/moveconfig.py               | 604
10 files changed, 1476 insertions, 429 deletions
diff --git a/tools/binman/binman.py b/tools/binman/binman.py
index 95d3a048d8..09dc36a3f7 100755
--- a/tools/binman/binman.py
+++ b/tools/binman/binman.py
@@ -17,15 +17,14 @@ import unittest
# Bring in the patman and dtoc libraries
our_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(our_path, '../patman'))
-sys.path.append(os.path.join(our_path, '../dtoc'))
-sys.path.append(os.path.join(our_path, '../'))
+for dirname in ['../patman', '../dtoc', '..']:
+ sys.path.insert(0, os.path.join(our_path, dirname))
# Bring in the libfdt module
-sys.path.append('tools')
+sys.path.insert(0, 'tools')
# Also allow entry-type modules to be brought in from the etype directory.
-sys.path.append(os.path.join(our_path, 'etype'))
+sys.path.insert(0, os.path.join(our_path, 'etype'))
import cmdline
import command
diff --git a/tools/dtoc/dtb_platdata.py b/tools/dtoc/dtb_platdata.py
new file mode 100644
index 0000000000..1f85343a9f
--- /dev/null
+++ b/tools/dtoc/dtb_platdata.py
@@ -0,0 +1,456 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2017 Google, Inc
+# Written by Simon Glass <sjg@chromium.org>
+#
+# SPDX-License-Identifier: GPL-2.0+
+#
+
+"""Device tree to platform data class
+
+This supports converting device tree data to C structure definitions and
+static data.
+"""
+
+import copy
+import sys
+
+import fdt
+import fdt_util
+
+# When we see these properties we ignore them - i.e. do not create a structure member
+PROP_IGNORE_LIST = [
+ '#address-cells',
+ '#gpio-cells',
+ '#size-cells',
+ 'compatible',
+ 'linux,phandle',
+ "status",
+ 'phandle',
+ 'u-boot,dm-pre-reloc',
+ 'u-boot,dm-tpl',
+ 'u-boot,dm-spl',
+]
+
+# C type declarations for the types we support
+TYPE_NAMES = {
+ fdt.TYPE_INT: 'fdt32_t',
+ fdt.TYPE_BYTE: 'unsigned char',
+ fdt.TYPE_STRING: 'const char *',
+ fdt.TYPE_BOOL: 'bool',
+}
+
+STRUCT_PREFIX = 'dtd_'
+VAL_PREFIX = 'dtv_'
+
+def conv_name_to_c(name):
+ """Convert a device-tree name to a C identifier
+
+ This uses multiple replace() calls instead of re.sub() since it is faster
+ (400ms for 1m calls versus 1000ms for the 're' version).
+
+ Args:
+ name: Name to convert
+ Return:
+ String containing the C version of this name
+ """
+ new = name.replace('@', '_at_')
+ new = new.replace('-', '_')
+ new = new.replace(',', '_')
+ new = new.replace('.', '_')
+ return new
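+
+# Examples, taken from the unit tests in test_dtoc.py:
+#   conv_name_to_c('serial@0x12') -> 'serial_at_0x12'
+#   conv_name_to_c('rockchip,rk3399-sdhci-5.1') -> 'rockchip_rk3399_sdhci_5_1'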
+
+def tab_to(num_tabs, line):
+ """Append tabs to a line of text to reach a tab stop.
+
+ Args:
+ num_tabs: Tab stop to obtain (0 = column 0, 1 = column 8, etc.)
+ line: Line of text to append to
+
+ Returns:
+ line with the correct number of tabs appended. If the line already
+ extends past that tab stop then a single space is appended.
+ """
+ if len(line) >= num_tabs * 8:
+ return line + ' '
+ return line + '\t' * (num_tabs - len(line) // 8)
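+
+# Examples, taken from the unit tests in test_dtoc.py:
+#   tab_to(1, 'fred') -> 'fred\t' (padded to column 8)
+#   tab_to(1, 'fred was here') -> 'fred was here ' (already past the stop)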
+
+def get_value(ftype, value):
+ """Get a value as a C expression
+
+ For integers this returns a byte-swapped (little-endian) hex string
+ For bytes this returns a hex string, e.g. 0x12
+ For strings this returns a literal string enclosed in quotes
+ For booleans this return 'true'
+
+ Args:
+ ftype: Data type (fdt_util)
+ value: Data value, as a string of bytes
+ """
+ if ftype == fdt.TYPE_INT:
+ return '%#x' % fdt_util.fdt32_to_cpu(value)
+ elif ftype == fdt.TYPE_BYTE:
+ return '%#x' % ord(value[0])
+ elif ftype == fdt.TYPE_STRING:
+ return '"%s"' % value
+ elif ftype == fdt.TYPE_BOOL:
+ return 'true'
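+
+# Examples, taken from the unit tests in test_dtoc.py:
+#   get_value(fdt.TYPE_STRING, 'test') -> '"test"'
+#   get_value(fdt.TYPE_BOOL, None) -> 'true'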
+
+def get_compat_name(node):
+ """Get a node's first compatible string as a C identifier
+
+ Args:
+ node: Node object to check
+ Return:
+ Tuple:
+ C identifier for the first compatible string
+ List of C identifiers for all the other compatible strings
+ (possibly empty)
+ """
+ compat = node.props['compatible'].value
+ aliases = []
+ if isinstance(compat, list):
+ compat, aliases = compat[0], compat[1:]
+ return conv_name_to_c(compat), [conv_name_to_c(a) for a in aliases]
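+
+# For example, a node with compatible = "rockchip,rk3399-sdhci-5.1",
+# "arasan,sdhci-5.1" yields ('rockchip_rk3399_sdhci_5_1',
+# ['arasan_sdhci_5_1']), as covered by the unit tests in test_dtoc.py.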
+
+def is_phandle(prop):
+ """Check if a node contains phandles
+
+ We have no reliable way of detecting whether a node uses a phandle
+ or not. As an interim measure, use a list of known property names.
+
+ Args:
+ prop: Prop object to check
+ Return:
+ True if the object value contains phandles, else False
+ """
+ if prop.name in ['clocks']:
+ return True
+ return False
+
+
+class DtbPlatdata(object):
+ """Provide a means to convert device tree binary data to platform data
+
+ The output of this process is C structures which can be used in space-
+ constrained environments where the ~3KB code overhead of device tree
+ code is not affordable.
+
+ Properties:
+ _fdt: Fdt object, referencing the device tree
+ _dtb_fname: Filename of the input device tree binary file
+ _valid_nodes: A list of Node objects with compatible strings
+ _include_disabled: true to include nodes marked status = "disabled"
+ _phandle_nodes: A dict of Nodes indexed by phandle (an integer)
+ _outfile: The current output file (sys.stdout or a real file)
+ _lines: Stashed list of output lines for outputting in the future
+ _aliases: A dict of aliased struct names, keyed by the alias C name
+ """
+ def __init__(self, dtb_fname, include_disabled):
+ self._fdt = None
+ self._dtb_fname = dtb_fname
+ self._valid_nodes = None
+ self._include_disabled = include_disabled
+ self._phandle_nodes = {}
+ self._outfile = None
+ self._lines = []
+ self._aliases = {}
+
+ def setup_output(self, fname):
+ """Set up the output destination
+
+ Once this is done, future calls to self.out() will output to this
+ file.
+
+ Args:
+ fname: Filename to send output to, or '-' for stdout
+ """
+ if fname == '-':
+ self._outfile = sys.stdout
+ else:
+ self._outfile = open(fname, 'w')
+
+ def out(self, line):
+ """Output a string to the output file
+
+ Args:
+ line: String to output
+ """
+ self._outfile.write(line)
+
+ def buf(self, line):
+ """Buffer up a string to send later
+
+ Args:
+ line: String to add to our 'buffer' list
+ """
+ self._lines.append(line)
+
+ def get_buf(self):
+ """Get the contents of the output buffer, and clear it
+
+ Returns:
+ The output buffer, which is then cleared for future use
+ """
+ lines = self._lines
+ self._lines = []
+ return lines
+
+ def scan_dtb(self):
+ """Scan the device tree to obtain a tree of notes and properties
+
+ Once this is done, self._fdt.GetRoot() can be called to obtain the
+ device tree root node, and progress from there.
+ """
+ self._fdt = fdt.FdtScan(self._dtb_fname)
+
+ def scan_node(self, root):
+ """Scan a node and subnodes to build a tree of node and phandle info
+
+ This adds each node to self._valid_nodes and each phandle to
+ self._phandle_nodes.
+
+ Args:
+ root: Root node for scan
+ """
+ for node in root.subnodes:
+ if 'compatible' in node.props:
+ status = node.props.get('status')
+ if (self._include_disabled or not status or
+ status.value != 'disabled'):
+ self._valid_nodes.append(node)
+ phandle_prop = node.props.get('phandle')
+ if phandle_prop:
+ phandle = phandle_prop.GetPhandle()
+ self._phandle_nodes[phandle] = node
+
+ # recurse to handle any subnodes
+ self.scan_node(node)
+
+ def scan_tree(self):
+ """Scan the device tree for useful information
+
+ This fills in the following properties:
+ _phandle_nodes: A dict of Nodes indexed by phandle (an integer)
+ _valid_nodes: A list of nodes we wish to include in the
+ platform data
+ """
+ self._phandle_nodes = {}
+ self._valid_nodes = []
+ return self.scan_node(self._fdt.GetRoot())
+
+ def scan_structs(self):
+ """Scan the device tree building up the C structures we will use.
+
+ Build a dict keyed by C struct name containing a dict of Prop
+ object for each struct field (keyed by property name). Where the
+ same struct appears multiple times, try to use the 'widest'
+ property, i.e. the one with a type which can express all others.
+
+ Once the widest property is determined, all other properties are
+ updated to match that width.
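+
+ For example, in dtoc_test_simple.dts the 'longbytearray' property is
+ nine bytes in one node but only four in another, so the struct member
+ is widened to 'unsigned char longbytearray[9]'.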
+ """
+ structs = {}
+ for node in self._valid_nodes:
+ node_name, _ = get_compat_name(node)
+ fields = {}
+
+ # Get a list of all the valid properties in this node.
+ for name, prop in node.props.items():
+ if name not in PROP_IGNORE_LIST and name[0] != '#':
+ fields[name] = copy.deepcopy(prop)
+
+ # If we've seen this node_name before, update the existing struct.
+ if node_name in structs:
+ struct = structs[node_name]
+ for name, prop in fields.items():
+ oldprop = struct.get(name)
+ if oldprop:
+ oldprop.Widen(prop)
+ else:
+ struct[name] = prop
+
+ # Otherwise store this as a new struct.
+ else:
+ structs[node_name] = fields
+
+ upto = 0
+ for node in self._valid_nodes:
+ node_name, _ = get_compat_name(node)
+ struct = structs[node_name]
+ for name, prop in node.props.items():
+ if name not in PROP_IGNORE_LIST and name[0] != '#':
+ prop.Widen(struct[name])
+ upto += 1
+
+ struct_name, aliases = get_compat_name(node)
+ for alias in aliases:
+ self._aliases[alias] = struct_name
+
+ return structs
+
+ def scan_phandles(self):
+ """Figure out what phandles each node uses
+
+ We need to be careful when outputting nodes that use phandles since
+ they must come after the declaration of the phandles in the C file.
+ Otherwise we get a compiler error since the phandle struct is not yet
+ declared.
+
+ This function adds to each node a list of phandle nodes that the node
+ depends on. This allows us to output things in the right order.
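+
+ For example, with 'clocks = <&phandle 1>;' (as in dtoc_test_phandle.dts)
+ the node referenced by &phandle is added to the source node's set.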
+ """
+ for node in self._valid_nodes:
+ node.phandles = set()
+ for pname, prop in node.props.items():
+ if pname in PROP_IGNORE_LIST or pname[0] == '#':
+ continue
+ if isinstance(prop.value, list):
+ if is_phandle(prop):
+ # Process the list as pairs of (phandle, id)
+ value_it = iter(prop.value)
+ for phandle_cell, _ in zip(value_it, value_it):
+ phandle = fdt_util.fdt32_to_cpu(phandle_cell)
+ target_node = self._phandle_nodes[phandle]
+ node.phandles.add(target_node)
+
+
+ def generate_structs(self, structs):
+ """Generate struct defintions for the platform data
+
+ This writes out the body of a header file consisting of structure
+ definitions for each node in self._valid_nodes. See the documentation in
+ README.of-plat for more information.
+ """
+ self.out('#include <stdbool.h>\n')
+ self.out('#include <libfdt.h>\n')
+
+ # Output the struct definition
+ for name in sorted(structs):
+ self.out('struct %s%s {\n' % (STRUCT_PREFIX, name))
+ for pname in sorted(structs[name]):
+ prop = structs[name][pname]
+ if is_phandle(prop):
+ # For phandles, include a reference to the target
+ self.out('\t%s%s[%d]' % (tab_to(2, 'struct phandle_2_cell'),
+ conv_name_to_c(prop.name),
+ len(prop.value) / 2))
+ else:
+ ptype = TYPE_NAMES[prop.type]
+ self.out('\t%s%s' % (tab_to(2, ptype),
+ conv_name_to_c(prop.name)))
+ if isinstance(prop.value, list):
+ self.out('[%d]' % len(prop.value))
+ self.out(';\n')
+ self.out('};\n')
+
+ for alias, struct_name in self._aliases.iteritems():
+ self.out('#define %s%s %s%s\n'% (STRUCT_PREFIX, alias,
+ STRUCT_PREFIX, struct_name))
+
+ def output_node(self, node):
+ """Output the C code for a node
+
+ Args:
+ node: node to output
+ """
+ struct_name, _ = get_compat_name(node)
+ var_name = conv_name_to_c(node.name)
+ self.buf('static struct %s%s %s%s = {\n' %
+ (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name))
+ for pname, prop in node.props.items():
+ if pname in PROP_IGNORE_LIST or pname[0] == '#':
+ continue
+ member_name = conv_name_to_c(prop.name)
+ self.buf('\t%s= ' % tab_to(3, '.' + member_name))
+
+ # Special handling for lists
+ if isinstance(prop.value, list):
+ self.buf('{')
+ vals = []
+ # For phandles, output a reference to the platform data
+ # of the target node.
+ if is_phandle(prop):
+ # Process the list as pairs of (phandle, id)
+ value_it = iter(prop.value)
+ for phandle_cell, id_cell in zip(value_it, value_it):
+ phandle = fdt_util.fdt32_to_cpu(phandle_cell)
+ id_num = fdt_util.fdt32_to_cpu(id_cell)
+ target_node = self._phandle_nodes[phandle]
+ name = conv_name_to_c(target_node.name)
+ vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id_num))
+ else:
+ for val in prop.value:
+ vals.append(get_value(prop.type, val))
+ self.buf(', '.join(vals))
+ self.buf('}')
+ else:
+ self.buf(get_value(prop.type, prop.value))
+ self.buf(',\n')
+ self.buf('};\n')
+
+ # Add a device declaration
+ self.buf('U_BOOT_DEVICE(%s) = {\n' % var_name)
+ self.buf('\t.name\t\t= "%s",\n' % struct_name)
+ self.buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name))
+ self.buf('\t.platdata_size\t= sizeof(%s%s),\n' % (VAL_PREFIX, var_name))
+ self.buf('};\n')
+ self.buf('\n')
+
+ self.out(''.join(self.get_buf()))
+
+ def generate_tables(self):
+ """Generate device defintions for the platform data
+
+ This writes out C platform data initialisation data and
+ U_BOOT_DEVICE() declarations for each valid node. Where a node has
+ multiple compatible strings, a #define is used to make them equivalent.
+
+ See the documentation in doc/driver-model/of-plat.txt for more
+ information.
+ """
+ self.out('#include <common.h>\n')
+ self.out('#include <dm.h>\n')
+ self.out('#include <dt-structs.h>\n')
+ self.out('\n')
+ nodes_to_output = list(self._valid_nodes)
+
+ # Keep outputting nodes until none are left
+ while nodes_to_output:
+ node = nodes_to_output[0]
+ # Output all the node's dependencies first
+ for req_node in node.phandles:
+ if req_node in nodes_to_output:
+ self.output_node(req_node)
+ nodes_to_output.remove(req_node)
+ self.output_node(node)
+ nodes_to_output.remove(node)
+
+
+def run_steps(args, dtb_file, include_disabled, output):
+ """Run all the steps of the dtoc tool
+
+ Args:
+ args: List of non-option arguments provided to the program
+ dtb_file: Filename of dtb file to process
+ include_disabled: True to include disabled nodes
+ output: Name of output file
+ """
+ if not args:
+ raise ValueError('Please specify a command: struct, platdata')
+
+ plat = DtbPlatdata(dtb_file, include_disabled)
+ plat.scan_dtb()
+ plat.scan_tree()
+ plat.setup_output(output)
+ structs = plat.scan_structs()
+ plat.scan_phandles()
+
+ for cmd in args[0].split(','):
+ if cmd == 'struct':
+ plat.generate_structs(structs)
+ elif cmd == 'platdata':
+ plat.generate_tables()
+ else:
+ raise ValueError("Unknown command '%s': (use: struct, platdata)" %
+ cmd)
diff --git a/tools/dtoc/dtoc.py b/tools/dtoc/dtoc.py
index 08e35f148c..ce7bc054e5 100755
--- a/tools/dtoc/dtoc.py
+++ b/tools/dtoc/dtoc.py
@@ -6,407 +6,55 @@
# SPDX-License-Identifier: GPL-2.0+
#
-import copy
-from optparse import OptionError, OptionParser
-import os
-import struct
-import sys
-
-# Bring in the patman libraries
-our_path = os.path.dirname(os.path.realpath(__file__))
-sys.path.append(os.path.join(our_path, '../patman'))
-
-import fdt
-import fdt_util
-
-# When we see these properties we ignore them - i.e. do not create a structure member
-PROP_IGNORE_LIST = [
- '#address-cells',
- '#gpio-cells',
- '#size-cells',
- 'compatible',
- 'linux,phandle',
- "status",
- 'phandle',
- 'u-boot,dm-pre-reloc',
- 'u-boot,dm-tpl',
- 'u-boot,dm-spl',
-]
-
-# C type declarations for the tyues we support
-TYPE_NAMES = {
- fdt.TYPE_INT: 'fdt32_t',
- fdt.TYPE_BYTE: 'unsigned char',
- fdt.TYPE_STRING: 'const char *',
- fdt.TYPE_BOOL: 'bool',
-};
-
-STRUCT_PREFIX = 'dtd_'
-VAL_PREFIX = 'dtv_'
-
-def Conv_name_to_c(name):
- """Convert a device-tree name to a C identifier
-
- Args:
- name: Name to convert
- Return:
- String containing the C version of this name
- """
- str = name.replace('@', '_at_')
- str = str.replace('-', '_')
- str = str.replace(',', '_')
- str = str.replace('.', '_')
- str = str.replace('/', '__')
- return str
-
-def TabTo(num_tabs, str):
- if len(str) >= num_tabs * 8:
- return str + ' '
- return str + '\t' * (num_tabs - len(str) // 8)
-
-class DtbPlatdata:
- """Provide a means to convert device tree binary data to platform data
-
- The output of this process is C structures which can be used in space-
- constrained encvironments where the ~3KB code overhead of device tree
- code is not affordable.
-
- Properties:
- fdt: Fdt object, referencing the device tree
- _dtb_fname: Filename of the input device tree binary file
- _valid_nodes: A list of Node object with compatible strings
- _options: Command-line options
- _phandle_node: A dict of nodes indexed by phandle number (1, 2...)
- _outfile: The current output file (sys.stdout or a real file)
- _lines: Stashed list of output lines for outputting in the future
- _phandle_node: A dict of Nodes indexed by phandle (an integer)
- """
- def __init__(self, dtb_fname, options):
- self._dtb_fname = dtb_fname
- self._valid_nodes = None
- self._options = options
- self._phandle_node = {}
- self._outfile = None
- self._lines = []
-
- def SetupOutput(self, fname):
- """Set up the output destination
-
- Once this is done, future calls to self.Out() will output to this
- file.
-
- Args:
- fname: Filename to send output to, or '-' for stdout
- """
- if fname == '-':
- self._outfile = sys.stdout
- else:
- self._outfile = open(fname, 'w')
-
- def Out(self, str):
- """Output a string to the output file
-
- Args:
- str: String to output
- """
- self._outfile.write(str)
-
- def Buf(self, str):
- """Buffer up a string to send later
-
- Args:
- str: String to add to our 'buffer' list
- """
- self._lines.append(str)
-
- def GetBuf(self):
- """Get the contents of the output buffer, and clear it
-
- Returns:
- The output buffer, which is then cleared for future use
- """
- lines = self._lines
- self._lines = []
- return lines
-
- def GetValue(self, type, value):
- """Get a value as a C expression
-
- For integers this returns a byte-swapped (little-endian) hex string
- For bytes this returns a hex string, e.g. 0x12
- For strings this returns a literal string enclosed in quotes
- For booleans this return 'true'
-
- Args:
- type: Data type (fdt_util)
- value: Data value, as a string of bytes
- """
- if type == fdt.TYPE_INT:
- return '%#x' % fdt_util.fdt32_to_cpu(value)
- elif type == fdt.TYPE_BYTE:
- return '%#x' % ord(value[0])
- elif type == fdt.TYPE_STRING:
- return '"%s"' % value
- elif type == fdt.TYPE_BOOL:
- return 'true'
-
- def GetCompatName(self, node):
- """Get a node's first compatible string as a C identifier
-
- Args:
- node: Node object to check
- Return:
- C identifier for the first compatible string
- """
- compat = node.props['compatible'].value
- if type(compat) == list:
- compat = compat[0]
- return Conv_name_to_c(compat)
+"""Device tree to C tool
- def ScanDtb(self):
- """Scan the device tree to obtain a tree of notes and properties
+This tool converts a device tree binary file (.dtb) into two C files. The
+intent is to allow a C program to access data from the device tree without
+having to link against libfdt. By putting the data from the device tree into
+C structures, normal C code can be used. This helps to reduce the size of the
+compiled program.
- Once this is done, self.fdt.GetRoot() can be called to obtain the
- device tree root node, and progress from there.
- """
- self.fdt = fdt.FdtScan(self._dtb_fname)
+Dtoc produces two output files:
- def ScanNode(self, root):
- for node in root.subnodes:
- if 'compatible' in node.props:
- status = node.props.get('status')
- if (not options.include_disabled and not status or
- status.value != 'disabled'):
- self._valid_nodes.append(node)
- phandle_prop = node.props.get('phandle')
- if phandle_prop:
- phandle = phandle_prop.GetPhandle()
- self._phandle_node[phandle] = node
+ dt-structs.h - contains struct definitions
+ dt-platdata.c - contains data from the device tree using the struct
+ definitions, as well as U-Boot driver definitions.
- # recurse to handle any subnodes
- self.ScanNode(node);
+This tool is used in U-Boot to provide device tree data to SPL without
+increasing the code size of SPL. This supports the CONFIG_SPL_OF_PLATDATA
+option. For more information about the use of this option and tool please
+see doc/driver-model/of-plat.txt
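+
+Example invocation (the .dtb path here is illustrative):
+
+ ./tools/dtoc/dtoc.py -d spl/u-boot-spl.dtb -o dt-platdata.c platdata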
+"""
- def ScanTree(self):
- """Scan the device tree for useful information
-
- This fills in the following properties:
- _phandle_node: A dict of Nodes indexed by phandle (an integer)
- _valid_nodes: A list of nodes we wish to consider include in the
- platform data
- """
- self._phandle_node = {}
- self._valid_nodes = []
- return self.ScanNode(self.fdt.GetRoot());
-
- for node in self.fdt.GetRoot().subnodes:
- if 'compatible' in node.props:
- status = node.props.get('status')
- if (not options.include_disabled and not status or
- status.value != 'disabled'):
- node_list.append(node)
- phandle_prop = node.props.get('phandle')
- if phandle_prop:
- phandle = phandle_prop.GetPhandle()
- self._phandle_node[phandle] = node
-
- self._valid_nodes = node_list
-
- def IsPhandle(self, prop):
- """Check if a node contains phandles
-
- We have no reliable way of detecting whether a node uses a phandle
- or not. As an interim measure, use a list of known property names.
-
- Args:
- prop: Prop object to check
- Return:
- True if the object value contains phandles, else False
- """
- if prop.name in ['clocks']:
- return True
- return False
-
- def ScanStructs(self):
- """Scan the device tree building up the C structures we will use.
-
- Build a dict keyed by C struct name containing a dict of Prop
- object for each struct field (keyed by property name). Where the
- same struct appears multiple times, try to use the 'widest'
- property, i.e. the one with a type which can express all others.
-
- Once the widest property is determined, all other properties are
- updated to match that width.
- """
- structs = {}
- for node in self._valid_nodes:
- node_name = self.GetCompatName(node)
- fields = {}
-
- # Get a list of all the valid properties in this node.
- for name, prop in node.props.items():
- if name not in PROP_IGNORE_LIST and name[0] != '#':
- fields[name] = copy.deepcopy(prop)
-
- # If we've seen this node_name before, update the existing struct.
- if node_name in structs:
- struct = structs[node_name]
- for name, prop in fields.items():
- oldprop = struct.get(name)
- if oldprop:
- oldprop.Widen(prop)
- else:
- struct[name] = prop
-
- # Otherwise store this as a new struct.
- else:
- structs[node_name] = fields
-
- upto = 0
- for node in self._valid_nodes:
- node_name = self.GetCompatName(node)
- struct = structs[node_name]
- for name, prop in node.props.items():
- if name not in PROP_IGNORE_LIST and name[0] != '#':
- prop.Widen(struct[name])
- upto += 1
- return structs
-
- def ScanPhandles(self):
- """Figure out what phandles each node uses
-
- We need to be careful when outputing nodes that use phandles since
- they must come after the declaration of the phandles in the C file.
- Otherwise we get a compiler error since the phandle struct is not yet
- declared.
-
- This function adds to each node a list of phandle nodes that the node
- depends on. This allows us to output things in the right order.
- """
- for node in self._valid_nodes:
- node.phandles = set()
- for pname, prop in node.props.items():
- if pname in PROP_IGNORE_LIST or pname[0] == '#':
- continue
- if type(prop.value) == list:
- if self.IsPhandle(prop):
- # Process the list as pairs of (phandle, id)
- it = iter(prop.value)
- for phandle_cell, id_cell in zip(it, it):
- phandle = fdt_util.fdt32_to_cpu(phandle_cell)
- id = fdt_util.fdt32_to_cpu(id_cell)
- target_node = self._phandle_node[phandle]
- node.phandles.add(target_node)
-
-
- def GenerateStructs(self, structs):
- """Generate struct defintions for the platform data
-
- This writes out the body of a header file consisting of structure
- definitions for node in self._valid_nodes. See the documentation in
- README.of-plat for more information.
- """
- self.Out('#include <stdbool.h>\n')
- self.Out('#include <libfdt.h>\n')
-
- # Output the struct definition
- for name in sorted(structs):
- self.Out('struct %s%s {\n' % (STRUCT_PREFIX, name));
- for pname in sorted(structs[name]):
- prop = structs[name][pname]
- if self.IsPhandle(prop):
- # For phandles, include a reference to the target
- self.Out('\t%s%s[%d]' % (TabTo(2, 'struct phandle_2_cell'),
- Conv_name_to_c(prop.name),
- len(prop.value) / 2))
- else:
- ptype = TYPE_NAMES[prop.type]
- self.Out('\t%s%s' % (TabTo(2, ptype),
- Conv_name_to_c(prop.name)))
- if type(prop.value) == list:
- self.Out('[%d]' % len(prop.value))
- self.Out(';\n')
- self.Out('};\n')
-
- def OutputNode(self, node):
- """Output the C code for a node
-
- Args:
- node: node to output
- """
- struct_name = self.GetCompatName(node)
- var_name = Conv_name_to_c(node.name)
- self.Buf('static struct %s%s %s%s = {\n' %
- (STRUCT_PREFIX, struct_name, VAL_PREFIX, var_name))
- for pname, prop in node.props.items():
- if pname in PROP_IGNORE_LIST or pname[0] == '#':
- continue
- ptype = TYPE_NAMES[prop.type]
- member_name = Conv_name_to_c(prop.name)
- self.Buf('\t%s= ' % TabTo(3, '.' + member_name))
-
- # Special handling for lists
- if type(prop.value) == list:
- self.Buf('{')
- vals = []
- # For phandles, output a reference to the platform data
- # of the target node.
- if self.IsPhandle(prop):
- # Process the list as pairs of (phandle, id)
- it = iter(prop.value)
- for phandle_cell, id_cell in zip(it, it):
- phandle = fdt_util.fdt32_to_cpu(phandle_cell)
- id = fdt_util.fdt32_to_cpu(id_cell)
- target_node = self._phandle_node[phandle]
- name = Conv_name_to_c(target_node.name)
- vals.append('{&%s%s, %d}' % (VAL_PREFIX, name, id))
- else:
- for val in prop.value:
- vals.append(self.GetValue(prop.type, val))
- self.Buf(', '.join(vals))
- self.Buf('}')
- else:
- self.Buf(self.GetValue(prop.type, prop.value))
- self.Buf(',\n')
- self.Buf('};\n')
-
- # Add a device declaration
- self.Buf('U_BOOT_DEVICE(%s) = {\n' % var_name)
- self.Buf('\t.name\t\t= "%s",\n' % struct_name)
- self.Buf('\t.platdata\t= &%s%s,\n' % (VAL_PREFIX, var_name))
- self.Buf('\t.platdata_size\t= sizeof(%s%s),\n' %
- (VAL_PREFIX, var_name))
- self.Buf('};\n')
- self.Buf('\n')
+from optparse import OptionParser
+import os
+import sys
+import unittest
- self.Out(''.join(self.GetBuf()))
+# Bring in the patman libraries
+our_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(os.path.join(our_path, '../patman'))
- def GenerateTables(self):
- """Generate device defintions for the platform data
+import dtb_platdata
- This writes out C platform data initialisation data and
- U_BOOT_DEVICE() declarations for each valid node. See the
- documentation in README.of-plat for more information.
- """
- self.Out('#include <common.h>\n')
- self.Out('#include <dm.h>\n')
- self.Out('#include <dt-structs.h>\n')
- self.Out('\n')
- nodes_to_output = list(self._valid_nodes)
+def run_tests():
+ """Run all the test we have for dtoc"""
+ import test_dtoc
- # Keep outputing nodes until there is none left
- while nodes_to_output:
- node = nodes_to_output[0]
- # Output all the node's dependencies first
- for req_node in node.phandles:
- if req_node in nodes_to_output:
- self.OutputNode(req_node)
- nodes_to_output.remove(req_node)
- self.OutputNode(node)
- nodes_to_output.remove(node)
+ result = unittest.TestResult()
+ sys.argv = [sys.argv[0]]
+ for module in (test_dtoc.TestDtoc,):
+ suite = unittest.TestLoader().loadTestsFromTestCase(module)
+ suite.run(result)
+ print result
+ for _, err in result.errors:
+ print err
+ for _, err in result.failures:
+ print err
-if __name__ != "__main__":
- pass
+if __name__ != '__main__':
+ sys.exit(1)
parser = OptionParser()
parser.add_option('-d', '--dtb-file', action='store',
@@ -415,22 +63,14 @@ parser.add_option('--include-disabled', action='store_true',
help='Include disabled nodes')
parser.add_option('-o', '--output', action='store', default='-',
help='Select output filename')
+parser.add_option('-t', '--test', action='store_true', dest='test',
+ default=False, help='run tests')
(options, args) = parser.parse_args()
-if not args:
- raise ValueError('Please specify a command: struct, platdata')
-
-plat = DtbPlatdata(options.dtb_file, options)
-plat.ScanDtb()
-plat.ScanTree()
-plat.SetupOutput(options.output)
-structs = plat.ScanStructs()
-plat.ScanPhandles()
+# Run our meagre tests
+if options.test:
+ run_tests()
-for cmd in args[0].split(','):
- if cmd == 'struct':
- plat.GenerateStructs(structs)
- elif cmd == 'platdata':
- plat.GenerateTables()
- else:
- raise ValueError("Unknown command '%s': (use: struct, platdata)" % cmd)
+else:
+ dtb_platdata.run_steps(args, options.dtb_file, options.include_disabled,
+ options.output)
diff --git a/tools/dtoc/dtoc_test.dts b/tools/dtoc/dtoc_test.dts
new file mode 100644
index 0000000000..1e86655975
--- /dev/null
+++ b/tools/dtoc/dtoc_test.dts
@@ -0,0 +1,12 @@
+/*
+ * Test device tree file for dtoc
+ *
+ * Copyright 2017 Google, Inc
+ *
+ * SPDX-License-Identifier: GPL-2.0+
+ */
+
+ /dts-v1/;
+
+/ {
+};
diff --git a/tools/dtoc/dtoc_test_aliases.dts b/tools/dtoc/dtoc_test_aliases.dts
new file mode 100644
index 0000000000..c727f185af
--- /dev/null
+++ b/tools/dtoc/dtoc_test_aliases.dts
@@ -0,0 +1,18 @@
+/*
+ * Test device tree file for dtoc
+ *
+ * Copyright 2017 Google, Inc
+ *
+ * SPDX-License-Identifier: GPL-2.0+
+ */
+
+ /dts-v1/;
+
+/ {
+ spl-test {
+ u-boot,dm-pre-reloc;
+ compatible = "compat1", "compat2.1-fred", "compat3";
+ intval = <1>;
+ };
+
+};
diff --git a/tools/dtoc/dtoc_test_empty.dts b/tools/dtoc/dtoc_test_empty.dts
new file mode 100644
index 0000000000..1e86655975
--- /dev/null
+++ b/tools/dtoc/dtoc_test_empty.dts
@@ -0,0 +1,12 @@
+/*
+ * Test device tree file for dtoc
+ *
+ * Copyright 2017 Google, Inc
+ *
+ * SPDX-License-Identifier: GPL-2.0+
+ */
+
+ /dts-v1/;
+
+/ {
+};
diff --git a/tools/dtoc/dtoc_test_phandle.dts b/tools/dtoc/dtoc_test_phandle.dts
new file mode 100644
index 0000000000..e9828a695b
--- /dev/null
+++ b/tools/dtoc/dtoc_test_phandle.dts
@@ -0,0 +1,23 @@
+/*
+ * Test device tree file for dtoc
+ *
+ * Copyright 2017 Google, Inc
+ *
+ * SPDX-License-Identifier: GPL-2.0+
+ */
+
+ /dts-v1/;
+
+/ {
+ phandle: phandle-target {
+ u-boot,dm-pre-reloc;
+ compatible = "target";
+ intval = <1>;
+ };
+
+ phandle-source {
+ u-boot,dm-pre-reloc;
+ compatible = "source";
+ clocks = <&phandle 1>;
+ };
+};
diff --git a/tools/dtoc/dtoc_test_simple.dts b/tools/dtoc/dtoc_test_simple.dts
new file mode 100644
index 0000000000..c736686263
--- /dev/null
+++ b/tools/dtoc/dtoc_test_simple.dts
@@ -0,0 +1,48 @@
+/*
+ * Test device tree file for dtoc
+ *
+ * Copyright 2017 Google, Inc
+ *
+ * SPDX-License-Identifier: GPL-2.0+
+ */
+
+ /dts-v1/;
+
+/ {
+ spl-test {
+ u-boot,dm-pre-reloc;
+ compatible = "sandbox,spl-test";
+ boolval;
+ intval = <1>;
+ intarray = <2 3 4>;
+ byteval = [05];
+ bytearray = [06];
+ longbytearray = [09 0a 0b 0c 0d 0e 0f 10 11];
+ stringval = "message";
+ stringarray = "multi-word", "message";
+ };
+
+ spl-test2 {
+ u-boot,dm-pre-reloc;
+ compatible = "sandbox,spl-test";
+ intval = <3>;
+ intarray = <5>;
+ byteval = [08];
+ bytearray = [01 23 34];
+ longbytearray = [09 0a 0b 0c];
+ stringval = "message2";
+ stringarray = "another", "multi-word", "message";
+ };
+
+ spl-test3 {
+ u-boot,dm-pre-reloc;
+ compatible = "sandbox,spl-test";
+ stringarray = "one";
+ };
+
+ spl-test4 {
+ u-boot,dm-pre-reloc;
+ compatible = "sandbox,spl-test.2";
+ };
+
+};
diff --git a/tools/dtoc/test_dtoc.py b/tools/dtoc/test_dtoc.py
new file mode 100644
index 0000000000..8b95c4124f
--- /dev/null
+++ b/tools/dtoc/test_dtoc.py
@@ -0,0 +1,271 @@
+#
+# Copyright (c) 2012 The Chromium OS Authors.
+#
+# SPDX-License-Identifier: GPL-2.0+
+#
+
+"""Tests for the dtb_platdata module
+
+This includes unit tests for some functions and functional tests for the
+dtoc tool.
+"""
+
+import collections
+import os
+import struct
+import unittest
+
+import dtb_platdata
+from dtb_platdata import conv_name_to_c
+from dtb_platdata import get_compat_name
+from dtb_platdata import get_value
+from dtb_platdata import tab_to
+import fdt
+import fdt_util
+import tools
+
+our_path = os.path.dirname(os.path.realpath(__file__))
+
+
+def get_dtb_file(dts_fname):
+ """Compile a .dts file to a .dtb
+
+ Args:
+ dts_fname: Filename of .dts file in the current directory
+
+ Returns:
+ Filename of compiled file in output directory
+ """
+ return fdt_util.EnsureCompiled(os.path.join(our_path, dts_fname))
+
+
+class TestDtoc(unittest.TestCase):
+ """Tests for dtoc"""
+ @classmethod
+ def setUpClass(cls):
+ tools.PrepareOutputDir(None)
+
+ @classmethod
+ def tearDownClass(cls):
+ tools._RemoveOutputDir()
+
+ def test_name(self):
+ """Test conversion of device tree names to C identifiers"""
+ self.assertEqual('serial_at_0x12', conv_name_to_c('serial@0x12'))
+ self.assertEqual('vendor_clock_frequency',
+ conv_name_to_c('vendor,clock-frequency'))
+ self.assertEqual('rockchip_rk3399_sdhci_5_1',
+ conv_name_to_c('rockchip,rk3399-sdhci-5.1'))
+
+ def test_tab_to(self):
+ """Test operation of tab_to() function"""
+ self.assertEqual('fred ', tab_to(0, 'fred'))
+ self.assertEqual('fred\t', tab_to(1, 'fred'))
+ self.assertEqual('fred was here ', tab_to(1, 'fred was here'))
+ self.assertEqual('fred was here\t\t', tab_to(3, 'fred was here'))
+ self.assertEqual('exactly8 ', tab_to(1, 'exactly8'))
+ self.assertEqual('exactly8\t', tab_to(2, 'exactly8'))
+
+ def test_get_value(self):
+ """Test operation of get_value() function"""
+ self.assertEqual('0x45',
+ get_value(fdt.TYPE_INT, struct.pack('>I', 0x45)))
+ self.assertEqual('0x45',
+ get_value(fdt.TYPE_BYTE, struct.pack('<I', 0x45)))
+ self.assertEqual('0x0',
+ get_value(fdt.TYPE_BYTE, struct.pack('>I', 0x45)))
+ self.assertEqual('"test"', get_value(fdt.TYPE_STRING, 'test'))
+ self.assertEqual('true', get_value(fdt.TYPE_BOOL, None))
+
+ def test_get_compat_name(self):
+ """Test operation of get_compat_name() function"""
+ Prop = collections.namedtuple('Prop', ['value'])
+ Node = collections.namedtuple('Node', ['props'])
+
+ prop = Prop(['rockchip,rk3399-sdhci-5.1', 'arasan,sdhci-5.1'])
+ node = Node({'compatible': prop})
+ self.assertEqual(('rockchip_rk3399_sdhci_5_1', ['arasan_sdhci_5_1']),
+ get_compat_name(node))
+
+ prop = Prop(['rockchip,rk3399-sdhci-5.1'])
+ node = Node({'compatible': prop})
+ self.assertEqual(('rockchip_rk3399_sdhci_5_1', []),
+ get_compat_name(node))
+
+ prop = Prop(['rockchip,rk3399-sdhci-5.1', 'arasan,sdhci-5.1', 'third'])
+ node = Node({'compatible': prop})
+ self.assertEqual(('rockchip_rk3399_sdhci_5_1',
+ ['arasan_sdhci_5_1', 'third']),
+ get_compat_name(node))
+
+ def test_empty_file(self):
+ """Test output from a device tree file with no nodes"""
+ dtb_file = get_dtb_file('dtoc_test_empty.dts')
+ output = tools.GetOutputFilename('output')
+ dtb_platdata.run_steps(['struct'], dtb_file, False, output)
+ with open(output) as infile:
+ lines = infile.read().splitlines()
+ self.assertEqual(['#include <stdbool.h>', '#include <libfdt.h>'], lines)
+
+ dtb_platdata.run_steps(['platdata'], dtb_file, False, output)
+ with open(output) as infile:
+ lines = infile.read().splitlines()
+ self.assertEqual(['#include <common.h>', '#include <dm.h>',
+ '#include <dt-structs.h>', ''], lines)
+
+ def test_simple(self):
+ """Test output from some simple nodes with various types of data"""
+ dtb_file = get_dtb_file('dtoc_test_simple.dts')
+ output = tools.GetOutputFilename('output')
+ dtb_platdata.run_steps(['struct'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <stdbool.h>
+#include <libfdt.h>
+struct dtd_sandbox_spl_test {
+\tbool\t\tboolval;
+\tunsigned char\tbytearray[3];
+\tunsigned char\tbyteval;
+\tfdt32_t\t\tintarray[4];
+\tfdt32_t\t\tintval;
+\tunsigned char\tlongbytearray[9];
+\tconst char *\tstringarray[3];
+\tconst char *\tstringval;
+};
+struct dtd_sandbox_spl_test_2 {
+};
+''', data)
+
+ dtb_platdata.run_steps(['platdata'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <common.h>
+#include <dm.h>
+#include <dt-structs.h>
+
+static struct dtd_sandbox_spl_test dtv_spl_test = {
+\t.bytearray\t\t= {0x6, 0x0, 0x0},
+\t.byteval\t\t= 0x5,
+\t.intval\t\t\t= 0x1,
+\t.longbytearray\t\t= {0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10, 0x11},
+\t.stringval\t\t= "message",
+\t.boolval\t\t= true,
+\t.intarray\t\t= {0x2, 0x3, 0x4, 0x0},
+\t.stringarray\t\t= {"multi-word", "message", ""},
+};
+U_BOOT_DEVICE(spl_test) = {
+\t.name\t\t= "sandbox_spl_test",
+\t.platdata\t= &dtv_spl_test,
+\t.platdata_size\t= sizeof(dtv_spl_test),
+};
+
+static struct dtd_sandbox_spl_test dtv_spl_test2 = {
+\t.bytearray\t\t= {0x1, 0x23, 0x34},
+\t.byteval\t\t= 0x8,
+\t.intval\t\t\t= 0x3,
+\t.longbytearray\t\t= {0x9, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0},
+\t.stringval\t\t= "message2",
+\t.intarray\t\t= {0x5, 0x0, 0x0, 0x0},
+\t.stringarray\t\t= {"another", "multi-word", "message"},
+};
+U_BOOT_DEVICE(spl_test2) = {
+\t.name\t\t= "sandbox_spl_test",
+\t.platdata\t= &dtv_spl_test2,
+\t.platdata_size\t= sizeof(dtv_spl_test2),
+};
+
+static struct dtd_sandbox_spl_test dtv_spl_test3 = {
+\t.stringarray\t\t= {"one", "", ""},
+};
+U_BOOT_DEVICE(spl_test3) = {
+\t.name\t\t= "sandbox_spl_test",
+\t.platdata\t= &dtv_spl_test3,
+\t.platdata_size\t= sizeof(dtv_spl_test3),
+};
+
+static struct dtd_sandbox_spl_test_2 dtv_spl_test4 = {
+};
+U_BOOT_DEVICE(spl_test4) = {
+\t.name\t\t= "sandbox_spl_test_2",
+\t.platdata\t= &dtv_spl_test4,
+\t.platdata_size\t= sizeof(dtv_spl_test4),
+};
+
+''', data)
+
+ def test_phandle(self):
+ """Test output from a node containing a phandle reference"""
+ dtb_file = get_dtb_file('dtoc_test_phandle.dts')
+ output = tools.GetOutputFilename('output')
+ dtb_platdata.run_steps(['struct'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <stdbool.h>
+#include <libfdt.h>
+struct dtd_source {
+\tstruct phandle_2_cell clocks[1];
+};
+struct dtd_target {
+\tfdt32_t\t\tintval;
+};
+''', data)
+
+ dtb_platdata.run_steps(['platdata'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <common.h>
+#include <dm.h>
+#include <dt-structs.h>
+
+static struct dtd_target dtv_phandle_target = {
+\t.intval\t\t\t= 0x1,
+};
+U_BOOT_DEVICE(phandle_target) = {
+\t.name\t\t= "target",
+\t.platdata\t= &dtv_phandle_target,
+\t.platdata_size\t= sizeof(dtv_phandle_target),
+};
+
+static struct dtd_source dtv_phandle_source = {
+\t.clocks\t\t\t= {{&dtv_phandle_target, 1}},
+};
+U_BOOT_DEVICE(phandle_source) = {
+\t.name\t\t= "source",
+\t.platdata\t= &dtv_phandle_source,
+\t.platdata_size\t= sizeof(dtv_phandle_source),
+};
+
+''', data)
+
+ def test_aliases(self):
+ """Test output from a node with multiple compatible strings"""
+ dtb_file = get_dtb_file('dtoc_test_aliases.dts')
+ output = tools.GetOutputFilename('output')
+ dtb_platdata.run_steps(['struct'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <stdbool.h>
+#include <libfdt.h>
+struct dtd_compat1 {
+\tfdt32_t\t\tintval;
+};
+#define dtd_compat2_1_fred dtd_compat1
+#define dtd_compat3 dtd_compat1
+''', data)
+
+ dtb_platdata.run_steps(['platdata'], dtb_file, False, output)
+ with open(output) as infile:
+ data = infile.read()
+ self.assertEqual('''#include <common.h>
+#include <dm.h>
+#include <dt-structs.h>
+
+static struct dtd_compat1 dtv_spl_test = {
+\t.intval\t\t\t= 0x1,
+};
+U_BOOT_DEVICE(spl_test) = {
+\t.name\t\t= "compat1",
+\t.platdata\t= &dtv_spl_test,
+\t.platdata_size\t= sizeof(dtv_spl_test),
+};
+
+''', data)
diff --git a/tools/moveconfig.py b/tools/moveconfig.py
index 7aa96120a1..eb4927f278 100755
--- a/tools/moveconfig.py
+++ b/tools/moveconfig.py
@@ -115,6 +115,128 @@ use your own. Instead of modifying the list directly, you can give
them via environments.
+Tips and tricks
+---------------
+
+To sync only X86 defconfigs:
+
+ ./tools/moveconfig.py -s -d <(grep -l X86 configs/*)
+
+or:
+
+ grep -l X86 configs/* | ./tools/moveconfig.py -s -d -
+
+To process CONFIG_CMD_FPGAD only for a subset of configs based on path match:
+
+ ls configs/{hrcon*,iocon*,strider*} | \
+ ./tools/moveconfig.py -Cy CONFIG_CMD_FPGAD -d -
+
+
+Finding implied CONFIGs
+-----------------------
+
+Some CONFIG options can be implied by others and this can help to reduce
+the size of the defconfig files. For example, CONFIG_X86 implies
+CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
+all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
+each of the x86 defconfig files.
+
+This tool can help find such configs. To use it, first build a database:
+
+ ./tools/moveconfig.py -b
+
+Then try to query it:
+
+ ./tools/moveconfig.py -i CONFIG_CMD_IRQ
+ CONFIG_CMD_IRQ found in 311/2384 defconfigs
+ 44 : CONFIG_SYS_FSL_ERRATUM_IFC_A002769
+ 41 : CONFIG_SYS_FSL_ERRATUM_A007075
+ 31 : CONFIG_SYS_FSL_DDR_VER_44
+ 28 : CONFIG_ARCH_P1010
+ 28 : CONFIG_SYS_FSL_ERRATUM_P1010_A003549
+ 28 : CONFIG_SYS_FSL_ERRATUM_SEC_A003571
+ 28 : CONFIG_SYS_FSL_ERRATUM_IFC_A003399
+ 25 : CONFIG_SYS_FSL_ERRATUM_A008044
+ 22 : CONFIG_ARCH_P1020
+ 21 : CONFIG_SYS_FSL_DDR_VER_46
+ 20 : CONFIG_MAX_PIRQ_LINKS
+ 20 : CONFIG_HPET_ADDRESS
+ 20 : CONFIG_X86
+ 20 : CONFIG_PCIE_ECAM_SIZE
+ 20 : CONFIG_IRQ_SLOT_COUNT
+ 20 : CONFIG_I8259_PIC
+ 20 : CONFIG_CPU_ADDR_BITS
+ 20 : CONFIG_RAMBASE
+ 20 : CONFIG_SYS_FSL_ERRATUM_A005871
+ 20 : CONFIG_PCIE_ECAM_BASE
+ 20 : CONFIG_X86_TSC_TIMER
+ 20 : CONFIG_I8254_TIMER
+ 20 : CONFIG_CMD_GETTIME
+ 19 : CONFIG_SYS_FSL_ERRATUM_A005812
+ 18 : CONFIG_X86_RUN_32BIT
+ 17 : CONFIG_CMD_CHIP_CONFIG
+ ...
+
+This shows a list of config options which might imply CONFIG_CMD_IRQ along
+with how many defconfigs they cover. From this you can see that CONFIG_X86
+implies CONFIG_CMD_IRQ. Therefore, instead of adding CONFIG_CMD_IRQ to
+the defconfig of every x86 board, you could add a single imply line to the
+Kconfig file:
+
+ config X86
+ bool "x86 architecture"
+ ...
+ imply CMD_IRQ
+
+That will cover 20 defconfigs. Many of the options listed are not suitable as
+they are not related. E.g. it would be odd for CONFIG_CMD_GETTIME to imply
+CMD_IRQ.
+
+Using this search you can reduce the size of moveconfig patches.
+
+You can automatically add 'imply' statements in the Kconfig with the -a
+option:
+
+ ./tools/moveconfig.py -s -i CONFIG_SCSI \
+ -a CONFIG_ARCH_LS1021A,CONFIG_ARCH_LS1043A
+
+This will add 'imply SCSI' to the two CONFIG options mentioned, assuming that
+the database indicates that they do actually imply CONFIG_SCSI and do not
+already have an 'imply SCSI'.
+
+The output shows where the imply is added:
+
+ 18 : CONFIG_ARCH_LS1021A arch/arm/cpu/armv7/ls102xa/Kconfig:1
+ 13 : CONFIG_ARCH_LS1043A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:11
+ 12 : CONFIG_ARCH_LS1046A arch/arm/cpu/armv8/fsl-layerscape/Kconfig:31
+
+The first number is the number of boards which can avoid having a special
+CONFIG_SCSI option in their defconfig file if this 'imply' is added.
+The location at the right is the Kconfig file and line number where the config
+appears. For example, adding 'imply CONFIG_SCSI' to the 'config ARCH_LS1021A'
+in arch/arm/cpu/armv7/ls102xa/Kconfig at line 1 will help 18 boards to reduce
+the size of their defconfig files.
+
+If you want to add an 'imply' to every imply config in the list, you can use
+
+ ./tools/moveconfig.py -s -i CONFIG_SCSI -a all
+
+To control which ones are displayed, use -I <list> where list is a list of
+options (use '-I help' to see possible options and their meaning).
+
+To skip showing you options that already have an 'imply' attached, use -A.
+
+When you have finished adding 'imply' options you can regenerate the
+defconfig files for affected boards with something like:
+
+ git show --stat | ./tools/moveconfig.py -s -d -
+
+This will regenerate only those defconfigs changed in the current commit.
+If you start with (say) 100 defconfigs being changed in the commit, and add
+a few 'imply' options as above, then regenerate, hopefully you can reduce the
+number of defconfigs changed in the commit.
+
+
Available options
-----------------
@@ -128,7 +250,7 @@ Available options
-d, --defconfigs
Specify a file containing a list of defconfigs to move. The defconfig
- files can be given with shell-style wildcards.
+ files can be given with shell-style wildcards. Use '-' to read from stdin.
-n, --dry-run
Perform a trial run that does not make any changes. It is useful to
@@ -169,7 +291,8 @@ Available options
-y, --yes
Instead of prompting, automatically go ahead with all operations. This
- includes cleaning up headers and CONFIG_SYS_EXTRA_OPTIONS.
+ includes cleaning up headers, CONFIG_SYS_EXTRA_OPTIONS, the config whitelist
+ and the README.
To see the complete list of supported options, run
@@ -177,6 +300,7 @@ To see the complete list of supported options, run
"""
+import collections
import copy
import difflib
import filecmp
@@ -185,13 +309,18 @@ import glob
import multiprocessing
import optparse
import os
+import Queue
import re
import shutil
import subprocess
import sys
import tempfile
+import threading
import time
+sys.path.append(os.path.join(os.path.dirname(__file__), 'buildman'))
+import kconfiglib
+
SHOW_GNU_MAKE = 'scripts/show-gnu-make'
SLEEP_TIME=0.03
@@ -244,6 +373,11 @@ COLOR_LIGHT_PURPLE = '1;35'
COLOR_LIGHT_CYAN = '1;36'
COLOR_WHITE = '1;37'
+AUTO_CONF_PATH = 'include/config/auto.conf'
+CONFIG_DATABASE = 'moveconfig.db'
+
+CONFIG_LEN = len('CONFIG_')
+
### helper functions ###
def get_devnull():
"""Get the file object of '/dev/null' device."""
@@ -278,15 +412,47 @@ def get_make_cmd():
sys.exit('GNU Make not found')
return ret[0].rstrip()
+def get_matched_defconfig(line):
+ """Get the defconfig files that match a pattern
+
+ Args:
+ line: Path or filename to match, e.g. 'configs/snow_defconfig' or
+ 'k2*_defconfig'. If no directory is provided, 'configs/' is
+ prepended
+
+ Returns:
+ a list of matching defconfig files
+ """
+ dirname = os.path.dirname(line)
+ if dirname:
+ pattern = line
+ else:
+ pattern = os.path.join('configs', line)
+ return glob.glob(pattern) + glob.glob(pattern + '_defconfig')
+
def get_matched_defconfigs(defconfigs_file):
- """Get all the defconfig files that match the patterns in a file."""
+ """Get all the defconfig files that match the patterns in a file.
+
+ Args:
+ defconfigs_file: File containing a list of defconfigs to process, or
+ '-' to read the list from stdin
+
+ Returns:
+ A list of paths to defconfig files, with no duplicates
+ """
defconfigs = []
- for i, line in enumerate(open(defconfigs_file)):
+ if defconfigs_file == '-':
+ fd = sys.stdin
+ defconfigs_file = 'stdin'
+ else:
+ fd = open(defconfigs_file)
+ for i, line in enumerate(fd):
line = line.strip()
if not line:
continue # skip blank lines silently
- pattern = os.path.join('configs', line)
- matched = glob.glob(pattern) + glob.glob(pattern + '_defconfig')
+ if ' ' in line:
+ line = line.split(' ')[0] # handle 'git log' input
+ matched = get_matched_defconfig(line)
if not matched:
print >> sys.stderr, "warning: %s:%d: no defconfig matched '%s'" % \
(defconfigs_file, i + 1, line)
@@ -682,6 +848,19 @@ class Progress:
print ' %d defconfigs out of %d\r' % (self.current, self.total),
sys.stdout.flush()
+
+class KconfigScanner:
+ """Kconfig scanner."""
+
+ def __init__(self):
+ """Scan all the Kconfig files and create a Config object."""
+ # Define environment variables referenced from Kconfig
+ os.environ['srctree'] = os.getcwd()
+ os.environ['UBOOTVERSION'] = 'dummy'
+ os.environ['KCONFIG_OBJDIR'] = ''
+ self.conf = kconfiglib.Config()
+
+
class KconfigParser:
"""A parser of .config and include/autoconf.mk."""
@@ -703,8 +882,7 @@ class KconfigParser:
self.autoconf = os.path.join(build_dir, 'include', 'autoconf.mk')
self.spl_autoconf = os.path.join(build_dir, 'spl', 'include',
'autoconf.mk')
- self.config_autoconf = os.path.join(build_dir, 'include', 'config',
- 'auto.conf')
+ self.config_autoconf = os.path.join(build_dir, AUTO_CONF_PATH)
self.defconfig = os.path.join(build_dir, 'defconfig')
def get_cross_compile(self):
@@ -890,6 +1068,34 @@ class KconfigParser:
return log
+
+class DatabaseThread(threading.Thread):
+ """This thread processes results from Slot threads.
+
+ It collects the data in the master config dictionary. There is only one
+ result thread, and this helps to serialise the build output.
+ """
+ def __init__(self, config_db, db_queue):
+ """Set up a new result thread
+
+ Args:
+ config_db: Dict to update with the collected config data
+ db_queue: Queue from which (defconfig, configs) results are read
+ """
+ threading.Thread.__init__(self)
+ self.config_db = config_db
+ self.db_queue = db_queue
+
+ def run(self):
+ """Called to start up the result thread.
+
+ We collect each result and add it to the config database.
+ """
+ while True:
+ defconfig, configs = self.db_queue.get()
+ self.config_db[defconfig] = configs
+ self.db_queue.task_done()
+
+
class Slot:
"""A slot to store a subprocess.
@@ -899,7 +1105,8 @@ class Slot:
for faster processing.
"""
- def __init__(self, configs, options, progress, devnull, make_cmd, reference_src_dir):
+ def __init__(self, configs, options, progress, devnull, make_cmd,
+ reference_src_dir, db_queue):
"""Create a new process slot.
Arguments:
@@ -910,6 +1117,7 @@ class Slot:
make_cmd: command name of GNU Make.
reference_src_dir: Determine the true starting config state from this
source tree.
+ db_queue: output queue to write config info for the database
"""
self.options = options
self.progress = progress
@@ -917,6 +1125,7 @@ class Slot:
self.devnull = devnull
self.make_cmd = (make_cmd, 'O=' + self.build_dir)
self.reference_src_dir = reference_src_dir
+ self.db_queue = db_queue
self.parser = KconfigParser(configs, options, self.build_dir)
self.state = STATE_IDLE
self.failed_boards = set()
@@ -992,6 +1201,8 @@ class Slot:
if self.current_src_dir:
self.current_src_dir = None
self.do_defconfig()
+ elif self.options.build_db:
+ self.do_build_db()
else:
self.do_savedefconfig()
elif self.state == STATE_SAVEDEFCONFIG:
@@ -1022,7 +1233,7 @@ class Slot:
self.state = STATE_DEFCONFIG
def do_autoconf(self):
- """Run 'make include/config/auto.conf'."""
+ """Run 'make AUTO_CONF_PATH'."""
self.cross_compile = self.parser.get_cross_compile()
if self.cross_compile is None:
@@ -1035,12 +1246,23 @@ class Slot:
if self.cross_compile:
cmd.append('CROSS_COMPILE=%s' % self.cross_compile)
cmd.append('KCONFIG_IGNORE_DUPLICATES=1')
- cmd.append('include/config/auto.conf')
+ cmd.append(AUTO_CONF_PATH)
self.ps = subprocess.Popen(cmd, stdout=self.devnull,
stderr=subprocess.PIPE,
cwd=self.current_src_dir)
self.state = STATE_AUTOCONF
+ def do_build_db(self):
+ """Add the board to the database"""
+ configs = {}
+ with open(os.path.join(self.build_dir, AUTO_CONF_PATH)) as fd:
+ for line in fd.readlines():
+ if line.startswith('CONFIG'):
+ config, value = line.split('=', 1)
+ configs[config] = value.rstrip()
+ self.db_queue.put([self.defconfig, configs])
+ self.finish(True)
+
def do_savedefconfig(self):
"""Update the .config and run 'make savedefconfig'."""
@@ -1123,7 +1345,7 @@ class Slots:
"""Controller of the array of subprocess slots."""
- def __init__(self, configs, options, progress, reference_src_dir):
+ def __init__(self, configs, options, progress, reference_src_dir, db_queue):
"""Create a new slots controller.
Arguments:
@@ -1132,6 +1354,7 @@ class Slots:
progress: A progress indicator.
reference_src_dir: Determine the true starting config state from this
source tree.
+ db_queue: output queue to write config info for the database
"""
self.options = options
self.slots = []
@@ -1139,7 +1362,7 @@ class Slots:
make_cmd = get_make_cmd()
for i in range(options.jobs):
self.slots.append(Slot(configs, options, progress, devnull,
- make_cmd, reference_src_dir))
+ make_cmd, reference_src_dir, db_queue))
def add(self, defconfig):
"""Add a new subprocess if a vacant slot is found.
@@ -1251,7 +1474,7 @@ class ReferenceSource:
return self.src_dir
-def move_config(configs, options):
+def move_config(configs, options, db_queue):
"""Move config options to defconfig files.
Arguments:
@@ -1261,6 +1484,8 @@ def move_config(configs, options):
if len(configs) == 0:
if options.force_sync:
print 'No CONFIG is specified. You are probably syncing defconfigs.',
+ elif options.build_db:
+ print 'Building %s database' % CONFIG_DATABASE
else:
print 'Neither CONFIG nor --force-sync is specified. Nothing will happen.',
else:
@@ -1279,7 +1504,7 @@ def move_config(configs, options):
defconfigs = get_all_defconfigs()
progress = Progress(len(defconfigs))
- slots = Slots(configs, options, progress, reference_src_dir)
+ slots = Slots(configs, options, progress, reference_src_dir, db_queue)
# Main loop to process defconfig files:
# Add a new subprocess into a vacant slot.
@@ -1298,6 +1523,304 @@ def move_config(configs, options):
slots.show_failed_boards()
slots.show_suspicious_boards()
+def find_kconfig_rules(kconf, config, imply_config):
+ """Check whether a config has a 'select' or 'imply' keyword
+
+ Args:
+ kconf: Kconfig.Config object
+ config: Name of config to check (without CONFIG_ prefix)
+ imply_config: Implying config (without CONFIG_ prefix) which may or
+ may not have an 'imply' for 'config'
+
+ Returns:
+ Symbol object for 'config' if found, else None
+ """
+ sym = kconf.get_symbol(imply_config)
+ if sym:
+ for sel in sym.get_selected_symbols():
+ if sel.get_name() == config:
+ return sym
+ return None
+
+def check_imply_rule(kconf, config, imply_config):
+ """Check if we can add an 'imply' option
+
+ This finds imply_config in the Kconfig and looks to see if it is possible
+ to add an 'imply' for 'config' to that part of the Kconfig.
+
+ Args:
+ kconf: Kconfig.Config object
+ config: Name of config to check (without CONFIG_ prefix)
+ imply_config: Implying config (without CONFIG_ prefix) which may or
+ may not have an 'imply' for 'config'
+
+ Returns:
+ tuple:
+ filename of Kconfig file containing imply_config, or None if none
+ line number within the Kconfig file, or 0 if none
+ message indicating the result
+ """
+ sym = kconf.get_symbol(imply_config)
+ if not sym:
+ return None, 0, 'cannot find sym'
+ locs = sym.get_def_locations()
+ if len(locs) != 1:
+ return None, 0, '%d locations' % len(locs)
+ fname, linenum = locs[0]
+ cwd = os.getcwd()
+ if cwd and fname.startswith(cwd):
+ fname = fname[len(cwd) + 1:]
+ file_line = ' at %s:%d' % (fname, linenum)
+ with open(fname) as fd:
+ data = fd.read().splitlines()
+ if data[linenum - 1] != 'config %s' % imply_config:
+ return None, 0, 'bad sym format %s%s' % (data[linenum - 1], file_line)
+ return fname, linenum, 'adding%s' % file_line
+
+def add_imply_rule(config, fname, linenum):
+ """Add a new 'imply' option to a Kconfig
+
+ Args:
+ config: config option to add an imply for (without CONFIG_ prefix)
+ fname: Kconfig filename to update
+ linenum: Line number to place the 'imply' before
+
+ Returns:
+ Message indicating the result
+ """
+ file_line = ' at %s:%d' % (fname, linenum)
+ data = open(fname).read().splitlines()
+ linenum -= 1
+
+ for offset, line in enumerate(data[linenum:]):
+ if line.strip().startswith('help') or not line:
+ data.insert(linenum + offset, '\timply %s' % config)
+ with open(fname, 'w') as fd:
+ fd.write('\n'.join(data) + '\n')
+ return 'added%s' % file_line
+
+ return 'could not insert%s' % file_line
+
+(IMPLY_MIN_2, IMPLY_TARGET, IMPLY_CMD, IMPLY_NON_ARCH_BOARD) = (
+ 1, 2, 4, 8)
+
+IMPLY_FLAGS = {
+ 'min2': [IMPLY_MIN_2, 'Show options which imply >2 boards (normally >5)'],
+ 'target': [IMPLY_TARGET, 'Allow CONFIG_TARGET_... options to imply'],
+ 'cmd': [IMPLY_CMD, 'Allow CONFIG_CMD_... to imply'],
+ 'non-arch-board': [
+ IMPLY_NON_ARCH_BOARD,
+ 'Allow Kconfig options outside arch/ and board/ to imply'],
+}
+
+def do_imply_config(config_list, add_imply, imply_flags, skip_added,
+ check_kconfig=True, find_superset=False):
+ """Find CONFIG options which imply those in the list
+
+ Some CONFIG options can be implied by others and this can help to reduce
+ the size of the defconfig files. For example, CONFIG_X86 implies
+ CONFIG_CMD_IRQ, so we can put 'imply CMD_IRQ' under 'config X86' and
+ all x86 boards will have that option, avoiding adding CONFIG_CMD_IRQ to
+ each of the x86 defconfig files.
+
+ This function uses the moveconfig database to find such options. It
+ displays a list of things that could possibly imply those in the list.
+ The algorithm ignores any that start with CONFIG_TARGET since these
+    typically refer to only a few defconfigs (often one). It also does not
+    display any config which affects fewer than 5 defconfigs.
+
+ The algorithm works using sets. For each target config in config_list:
+ - Get the set 'defconfigs' which use that target config
+    - For each config (from a list of all configs):
+        - Get the set 'imply_defconfigs' of defconfigs which use that config
+        - If imply_defconfigs contains anything not in defconfigs then
+            this config does not imply the target config
+
+    Args:
+ config_list: List of CONFIG options to check (each a string)
+ add_imply: Automatically add an 'imply' for each config.
+ imply_flags: Flags which control which implying configs are allowed
+ (IMPLY_...)
+ skip_added: Don't show options which already have an imply added.
+ check_kconfig: Check if implied symbols already have an 'imply' or
+ 'select' for the target config, and show this information if so.
+ find_superset: True to look for configs which are a superset of those
+ already found. So for example if CONFIG_EXYNOS5 implies an option,
+ but CONFIG_EXYNOS covers a larger set of defconfigs and also
+ implies that option, this will drop the former in favour of the
+            latter. In practice this option has not proved very useful.
+
+    Note the terminology:
+        config - a CONFIG_XXX option (a string, e.g. 'CONFIG_CMD_EEPROM')
+ defconfig - a defconfig file (a string, e.g. 'configs/snow_defconfig')
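+
+    Worked example of the set test (toy data, purely illustrative):
+        defconfigs = {'configs/snow_defconfig'}
+        imply_defconfigs = {'configs/snow_defconfig',
+                            'configs/peach_pi_defconfig'}
+        Here imply_defconfigs contains peach_pi_defconfig, which is not in
+        defconfigs, so this candidate config would be rejected.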
+ """
+ kconf = KconfigScanner().conf if check_kconfig else None
+ if add_imply and add_imply != 'all':
+        add_imply = add_imply.split(',')
+
+ # key is defconfig name, value is dict of (CONFIG_xxx, value)
+ config_db = {}
+
+ # Holds a dict containing the set of defconfigs that contain each config
+ # key is config, value is set of defconfigs using that config
+ defconfig_db = collections.defaultdict(set)
+
+ # Set of all config options we have seen
+ all_configs = set()
+
+ # Set of all defconfigs we have seen
+ all_defconfigs = set()
+
+ # Read in the database
+ configs = {}
+ with open(CONFIG_DATABASE) as fd:
+ for line in fd.readlines():
+ line = line.rstrip()
+ if not line: # Separator between defconfigs
+ config_db[defconfig] = configs
+ all_defconfigs.add(defconfig)
+ configs = {}
+ elif line[0] == ' ': # CONFIG line
+ config, value = line.strip().split('=', 1)
+ configs[config] = value
+ defconfig_db[config].add(defconfig)
+ all_configs.add(config)
+ else: # New defconfig
+ defconfig = line
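+
+    # The database parsed above has this shape (entries are illustrative,
+    # not real output):
+    #
+    #   configs/snow_defconfig        <- defconfig name at column 0
+    #    CONFIG_CMD_EEPROM=y          <- config lines are indented one space
+    #    CONFIG_TPL=y
+    #                                 <- a blank line ends each defconfig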
+
+    # Work through each target config option in turn, independently
+ for config in config_list:
+ defconfigs = defconfig_db.get(config)
+ if not defconfigs:
+ print '%s not found in any defconfig' % config
+ continue
+
+ # Get the set of defconfigs without this one (since a config cannot
+ # imply itself)
+ non_defconfigs = all_defconfigs - defconfigs
+ num_defconfigs = len(defconfigs)
+        print '%s found in %d/%d defconfigs' % (config, num_defconfigs,
+                                                len(all_defconfigs))
+
+ # This will hold the results: key=config, value=defconfigs containing it
+ imply_configs = {}
+ rest_configs = all_configs - set([config])
+
+ # Look at every possible config, except the target one
+ for imply_config in rest_configs:
+ if 'ERRATUM' in imply_config:
+ continue
+ if not (imply_flags & IMPLY_CMD):
+ if 'CONFIG_CMD' in imply_config:
+ continue
+ if not (imply_flags & IMPLY_TARGET):
+ if 'CONFIG_TARGET' in imply_config:
+ continue
+
+ # Find set of defconfigs that have this config
+ imply_defconfig = defconfig_db[imply_config]
+
+ # Get the intersection of this with defconfigs containing the
+ # target config
+ common_defconfigs = imply_defconfig & defconfigs
+
+ # Get the set of defconfigs containing this config which DO NOT
+            # also contain the target config. If this set is non-empty it means
+ # that this config affects other defconfigs as well as (possibly)
+ # the ones affected by the target config. This means it implies
+ # things we don't want to imply.
+ not_common_defconfigs = imply_defconfig & non_defconfigs
+ if not_common_defconfigs:
+ continue
+
+ # If there are common defconfigs, imply_config may be useful
+ if common_defconfigs:
+ skip = False
+ if find_superset:
+ for prev in imply_configs.keys():
+ prev_count = len(imply_configs[prev])
+ count = len(common_defconfigs)
+ if (prev_count > count and
+ (imply_configs[prev] & common_defconfigs ==
+ common_defconfigs)):
+ # skip imply_config because prev is a superset
+ skip = True
+ break
+ elif count > prev_count:
+ # delete prev because imply_config is a superset
+ del imply_configs[prev]
+ if not skip:
+ imply_configs[imply_config] = common_defconfigs
+
+ # Now we have a dict imply_configs of configs which imply each config
+ # The value of each dict item is the set of defconfigs containing that
+ # config. Rank them so that we print the configs that imply the largest
+ # number of defconfigs first.
+ ranked_iconfigs = sorted(imply_configs,
+ key=lambda k: len(imply_configs[k]), reverse=True)
+ kconfig_info = ''
+ cwd = os.getcwd()
+ add_list = collections.defaultdict(list)
+ for iconfig in ranked_iconfigs:
+ num_common = len(imply_configs[iconfig])
+
+        # Don't bother if fewer than 5 defconfigs (2 with 'min2') are affected.
+ if num_common < (2 if imply_flags & IMPLY_MIN_2 else 5):
+ continue
+ missing = defconfigs - imply_configs[iconfig]
+        missing_str = ', '.join(missing) if missing else 'all'
+ show = True
+ if kconf:
+ sym = find_kconfig_rules(kconf, config[CONFIG_LEN:],
+ iconfig[CONFIG_LEN:])
+ kconfig_info = ''
+ if sym:
+ locs = sym.get_def_locations()
+ if len(locs) == 1:
+ fname, linenum = locs[0]
+ if cwd and fname.startswith(cwd):
+ fname = fname[len(cwd) + 1:]
+ kconfig_info = '%s:%d' % (fname, linenum)
+ if skip_added:
+ show = False
+ else:
+ sym = kconf.get_symbol(iconfig[CONFIG_LEN:])
+ fname = ''
+ if sym:
+ locs = sym.get_def_locations()
+ if len(locs) == 1:
+ fname, linenum = locs[0]
+ if cwd and fname.startswith(cwd):
+ fname = fname[len(cwd) + 1:]
+ in_arch_board = not sym or (fname.startswith('arch') or
+ fname.startswith('board'))
+ if (not in_arch_board and
+ not (imply_flags & IMPLY_NON_ARCH_BOARD)):
+ continue
+
+ if add_imply and (add_imply == 'all' or
+ iconfig in add_imply):
+ fname, linenum, kconfig_info = (check_imply_rule(kconf,
+ config[CONFIG_LEN:], iconfig[CONFIG_LEN:]))
+ if fname:
+ add_list[fname].append(linenum)
+
+ if show and kconfig_info != 'skip':
+            print '%5d : %-30s%-25s %s' % (num_common, iconfig,
+ kconfig_info, missing_str)
+
+ # Having collected a list of things to add, now we add them. We process
+ # each file from the largest line number to the smallest so that
+ # earlier additions do not affect our line numbers. E.g. if we added an
+ # imply at line 20 it would change the position of each line after
+ # that.
+ for fname, linenums in add_list.iteritems():
+ for linenum in sorted(linenums, reverse=True):
+ add_imply_rule(config[CONFIG_LEN:], fname, linenum)
+
+
def main():
try:
cpu_count = multiprocessing.cpu_count()
@@ -1306,12 +1829,26 @@ def main():
parser = optparse.OptionParser()
# Add options here
+ parser.add_option('-a', '--add-imply', type='string', default='',
+ help='comma-separated list of CONFIG options to add '
+ "an 'imply' statement to for the CONFIG in -i")
+ parser.add_option('-A', '--skip-added', action='store_true', default=False,
+ help="don't show options which are already marked as "
+ 'implying others')
+ parser.add_option('-b', '--build-db', action='store_true', default=False,
+ help='build a CONFIG database')
parser.add_option('-c', '--color', action='store_true', default=False,
help='display the log in color')
parser.add_option('-C', '--commit', action='store_true', default=False,
help='Create a git commit for the operation')
parser.add_option('-d', '--defconfigs', type='string',
- help='a file containing a list of defconfigs to move')
+ help='a file containing a list of defconfigs to move, '
+ "one per line (for example 'snow_defconfig') "
+ "or '-' to read from stdin")
+ parser.add_option('-i', '--imply', action='store_true', default=False,
+ help='find options which imply others')
+ parser.add_option('-I', '--imply-flags', type='string', default='',
+ help="control the -i option ('help' for help")
parser.add_option('-n', '--dry-run', action='store_true', default=False,
help='perform a trial run (show log with no changes)')
parser.add_option('-e', '--exit-on-error', action='store_true',
@@ -1336,7 +1873,8 @@ def main():
(options, configs) = parser.parse_args()
- if len(configs) == 0 and not options.force_sync:
+ if len(configs) == 0 and not any((options.force_sync, options.build_db,
+ options.imply)):
parser.print_usage()
sys.exit(1)
@@ -1346,10 +1884,32 @@ def main():
check_top_directory()
+ if options.imply:
+ imply_flags = 0
+        if options.imply_flags:
+            for flag in options.imply_flags.split(','):
+                if flag == 'help' or flag not in IMPLY_FLAGS:
+                    print "Imply flags: (separate with ',')"
+                    for name, info in IMPLY_FLAGS.iteritems():
+                        print ' %-15s: %s' % (name, info[1])
+                    parser.print_usage()
+                    sys.exit(1)
+                imply_flags |= IMPLY_FLAGS[flag][0]
+
+ do_imply_config(configs, options.add_imply, imply_flags,
+ options.skip_added)
+ return
+
+ config_db = {}
+ db_queue = Queue.Queue()
+ t = DatabaseThread(config_db, db_queue)
+ t.setDaemon(True)
+ t.start()
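+
+    # move_config() will feed results into db_queue; the DatabaseThread
+    # (defined earlier in this file) drains the queue into config_db, and
+    # db_queue.join() below blocks until every queued item has been recorded,
+    # so a subsequent -b database write sees complete data.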
+
if not options.cleanup_headers_only:
check_clean_directory()
update_cross_compile(options.color)
- move_config(configs, options)
+ move_config(configs, options, db_queue)
+ db_queue.join()
if configs:
cleanup_headers(configs, options)
@@ -1369,5 +1929,13 @@ def main():
msg += '\n\nRsync all defconfig files using moveconfig.py'
subprocess.call(['git', 'commit', '-s', '-m', msg])
+ if options.build_db:
+ with open(CONFIG_DATABASE, 'w') as fd:
+ for defconfig, configs in config_db.iteritems():
+ print >>fd, '%s' % defconfig
+ for config in sorted(configs.keys()):
+ print >>fd, ' %s=%s' % (config, configs[config])
+ print >>fd
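+
+        # Typical round trip (hedged example): build the database once with
+        # 'moveconfig.py -b', then query it with e.g.
+        # 'moveconfig.py -i CONFIG_CMD_EEPROM' to list options which could
+        # imply CONFIG_CMD_EEPROM.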
+
if __name__ == '__main__':
main()