binman refactoring to improve section handling
bloblist - allow it to be allocated
sandbox config-header cleanup

# gpg: Signature made Sun 05 Dec 2021 10:14:24 PM EST
# gpg:                using RSA key B25C0022AF86A7CC1655B6277F173A3E9008ADE6
# gpg:                issuer "sjg@chromium.org"
# gpg: Good signature from "Simon Glass <sjg@chromium.org>" [unknown]
# gpg: WARNING: This key is not certified with a trusted signature!
# gpg:          There is no indication that the signature belongs to the owner.
# Primary key fingerprint: B25C 0022 AF86 A7CC 1655  B627 7F17 3A3E 9008 ADE6
This commit is contained in:
Tom Rini
2021-12-05 22:42:07 -05:00
65 changed files with 1083 additions and 266 deletions

View File

@@ -1123,6 +1123,7 @@ endif
$(CONFIG_WATCHDOG)$(CONFIG_HW_WATCHDOG))
$(call deprecated,CONFIG_DM_ETH,Ethernet drivers,v2020.07,$(CONFIG_NET))
$(call deprecated,CONFIG_DM_I2C,I2C drivers,v2022.04,$(CONFIG_SYS_I2C_LEGACY))
$(call deprecated,CONFIG_DM_KEYBOARD,Keyboard drivers,v2022.10,$(CONFIG_KEYBOARD))
@# Check that this build does not use CONFIG options that we do not
@# know about unless they are in Kconfig. All the existing CONFIG
@# options are whitelisted, so new ones should not be added.
@@ -1303,11 +1304,13 @@ default_dt := $(if $(DEVICE_TREE),$(DEVICE_TREE),$(CONFIG_DEFAULT_DEVICE_TREE))
quiet_cmd_binman = BINMAN $@
cmd_binman = $(srctree)/tools/binman/binman $(if $(BINMAN_DEBUG),-D) \
$(foreach f,$(BINMAN_TOOLPATHS),--toolpath $(f)) \
--toolpath $(objtree)/tools \
$(if $(BINMAN_VERBOSE),-v$(BINMAN_VERBOSE)) \
build -u -d u-boot.dtb -O . -m --allow-missing \
-I . -I $(srctree) -I $(srctree)/board/$(BOARDDIR) \
-I arch/$(ARCH)/dts -a of-list=$(CONFIG_OF_LIST) \
$(foreach f,$(BINMAN_INDIRS),-I $(f)) \
-a atf-bl31-path=${BL31} \
-a opensbi-path=${OPENSBI} \
-a default-dt=$(default_dt) \

11
README
View File

@@ -772,9 +772,6 @@ The following options need to be configured:
least one non-MTD partition type as well.
- IDE Reset method:
CONFIG_IDE_RESET_ROUTINE - this is defined in several
board configurations files but used nowhere!
CONFIG_IDE_RESET - if this is defined, IDE Reset will
be performed by calling the function
ide_set_reset(int reset)
@@ -1063,14 +1060,6 @@ The following options need to be configured:
- Keyboard Support:
See Kconfig help for available keyboard drivers.
CONFIG_KEYBOARD
Define this to enable a custom keyboard support.
This simply calls drv_keyboard_init() which must be
defined in your board-specific files. This option is deprecated
and is only used by novena. For new boards, use driver model
instead.
- Video support:
CONFIG_FSL_DIU_FB
Enable the Freescale DIU video driver. Reference boards for

View File

@@ -194,6 +194,7 @@ config SANDBOX
imply PHY_FIXED
imply DM_DSA
imply CMD_EXTENSION
imply KEYBOARD
config SH
bool "SuperH architecture"

View File

@@ -23,6 +23,7 @@ config ARCH_EXYNOS5
imply CMD_HASH
imply CRC32_VERIFY
imply HASH_VERIFY
imply KEYBOARD
imply USB_ETHER_ASIX
imply USB_ETHER_RTL8152
imply USB_ETHER_SMSC95XX

View File

@@ -67,8 +67,6 @@
*/
#ifdef CONFIG_IDE
#define __io
/* Needs byte-swapping for ATA data register */
#define CONFIG_IDE_SWAP_IO
/* Data, registers and alternate blocks are at the same offset */
#define CONFIG_SYS_ATA_DATA_OFFSET (0x0100)
#define CONFIG_SYS_ATA_REG_OFFSET (0x0100)

View File

@@ -51,9 +51,6 @@
/* The FMAN driver uses the PHYLIB infrastructure */
/* All PPC boards must swap IDE bytes */
#define CONFIG_IDE_SWAP_IO
#if defined(CONFIG_DM_SERIAL) && !defined(CONFIG_CLK_MPC83XX)
/*
* TODO: Convert this to a clock driver that can give us the UART

View File

@@ -68,4 +68,14 @@ config SANDBOX_BITS_PER_LONG
default 32 if HOST_32BIT
default 64 if HOST_64BIT
config SYS_FDT_LOAD_ADDR
hex "Address at which to load devicetree"
default 0x100
help
With sandbox the devicetree is loaded into the emulated RAM. This sets
the address that is used. There must be enough space at this address
to load the full devicetree without it overwriting anything else.
See `doc/arch/sandbox.rst` for more information.
endmenu

View File

@@ -638,7 +638,7 @@ int os_get_filesize(const char *fname, loff_t *size)
void os_putc(int ch)
{
putchar(ch);
fputc(ch, stdout);
}
void os_puts(const char *str)

View File

@@ -2350,6 +2350,8 @@ config CMD_LOG
config CMD_TRACE
bool "trace - Support tracing of function calls and timing"
depends on TRACE
default y
help
Enables a command to control using of function tracing within
U-Boot. This allows recording of call traces including timing

View File

@@ -78,7 +78,7 @@ static int do_host_info(struct cmd_tbl *cmdtp, int flag, int argc,
if (argc < 1 || argc > 2)
return CMD_RET_USAGE;
int min_dev = 0;
int max_dev = CONFIG_HOST_MAX_DEVICES - 1;
int max_dev = SANDBOX_HOST_MAX_DEVICES - 1;
if (argc >= 2) {
char *ep;
char *dev_str = argv[1];

View File

@@ -727,6 +727,8 @@ config TPL_BLOBLIST
This enables a bloblist in TPL. The bloblist is set up in TPL and
passed to SPL and U-Boot proper.
if BLOBLIST
config BLOBLIST_SIZE
hex "Size of bloblist"
depends on BLOBLIST
@@ -737,17 +739,24 @@ config BLOBLIST_SIZE
is set up in the first part of U-Boot to run (TPL, SPL or U-Boot
proper), and this same bloblist is used for subsequent stages.
config BLOBLIST_ALLOC
bool "Allocate bloblist"
help
Allocate the bloblist using malloc(). This avoids the need to
specify a fixed address on systems where this is unknown or can
change at runtime.
config BLOBLIST_ADDR
hex "Address of bloblist"
depends on BLOBLIST
default 0xc000 if SANDBOX
help
Sets the address of the bloblist, set up by the first part of U-Boot
which runs. Subsequent U-Boot stages typically use the same address.
This is not used if BLOBLIST_ALLOC is selected.
config BLOBLIST_SIZE_RELOC
hex "Size of bloblist after relocation"
depends on BLOBLIST
default BLOBLIST_SIZE
help
Sets the size of the bloblist in bytes after relocation. Since U-Boot
@@ -755,6 +764,8 @@ config BLOBLIST_SIZE_RELOC
size than the one set up by SPL. This bloblist is set up during the
relocation process.
endif # BLOBLIST
endmenu
source "common/spl/Kconfig"

View File

@@ -7,6 +7,7 @@
#include <common.h>
#include <bloblist.h>
#include <log.h>
#include <malloc.h>
#include <mapmem.h>
#include <spl.h>
#include <asm/global_data.h>
@@ -416,10 +417,21 @@ int bloblist_init(void)
ret = bloblist_check(CONFIG_BLOBLIST_ADDR,
CONFIG_BLOBLIST_SIZE);
if (ret) {
ulong addr;
log(LOGC_BLOBLIST, expected ? LOGL_WARNING : LOGL_DEBUG,
"Existing bloblist not found: creating new bloblist\n");
ret = bloblist_new(CONFIG_BLOBLIST_ADDR, CONFIG_BLOBLIST_SIZE,
0);
if (IS_ENABLED(CONFIG_BLOBLIST_ALLOC)) {
void *ptr = memalign(BLOBLIST_ALIGN,
CONFIG_BLOBLIST_SIZE);
if (!ptr)
return log_msg_ret("alloc", -ENOMEM);
addr = map_to_sysmem(ptr);
} else {
addr = CONFIG_BLOBLIST_ADDR;
}
ret = bloblist_new(addr, CONFIG_BLOBLIST_SIZE, 0);
} else {
log(LOGC_BLOBLIST, LOGL_DEBUG, "Found existing bloblist\n");
}

View File

@@ -655,8 +655,14 @@ static int reloc_bootstage(void)
static int reloc_bloblist(void)
{
#ifdef CONFIG_BLOBLIST
if (gd->flags & GD_FLG_SKIP_RELOC)
/*
* Relocate only if we are supposed to send it
*/
if ((gd->flags & GD_FLG_SKIP_RELOC) &&
CONFIG_BLOBLIST_SIZE == CONFIG_BLOBLIST_SIZE_RELOC) {
debug("Not relocating bloblist\n");
return 0;
}
if (gd->new_bloblist) {
int size = CONFIG_BLOBLIST_SIZE;

View File

@@ -61,6 +61,7 @@ CONFIG_I2C_CROS_EC_TUNNEL=y
CONFIG_SYS_I2C_ROCKCHIP=y
CONFIG_I2C_MUX=y
CONFIG_DM_KEYBOARD=y
CONFIG_KEYBOARD=y
CONFIG_CROS_EC_KEYB=y
CONFIG_CROS_EC=y
CONFIG_CROS_EC_SPI=y

View File

@@ -63,6 +63,7 @@ CONFIG_I2C_CROS_EC_TUNNEL=y
CONFIG_SYS_I2C_ROCKCHIP=y
CONFIG_I2C_MUX=y
CONFIG_DM_KEYBOARD=y
CONFIG_KEYBOARD=y
CONFIG_CROS_EC_KEYB=y
CONFIG_CROS_EC=y
CONFIG_CROS_EC_SPI=y

View File

@@ -63,6 +63,7 @@ CONFIG_I2C_CROS_EC_TUNNEL=y
CONFIG_SYS_I2C_ROCKCHIP=y
CONFIG_I2C_MUX=y
CONFIG_DM_KEYBOARD=y
CONFIG_KEYBOARD=y
CONFIG_CROS_EC_KEYB=y
CONFIG_CROS_EC=y
CONFIG_CROS_EC_SPI=y

View File

@@ -62,6 +62,7 @@ CONFIG_I2C_CROS_EC_TUNNEL=y
CONFIG_SYS_I2C_ROCKCHIP=y
CONFIG_I2C_MUX=y
CONFIG_DM_KEYBOARD=y
CONFIG_KEYBOARD=y
CONFIG_CROS_EC_KEYB=y
CONFIG_CROS_EC=y
CONFIG_CROS_EC_SPI=y

View File

@@ -60,6 +60,7 @@ CONFIG_DWC_AHSATA=y
CONFIG_SYS_I2C_LEGACY=y
CONFIG_SPL_SYS_I2C_LEGACY=y
CONFIG_SYS_I2C_MXC=y
CONFIG_KEYBOARD=y
CONFIG_FSL_USDHC=y
CONFIG_PHYLIB=y
CONFIG_PHY_MICREL=y

View File

@@ -43,6 +43,7 @@ CONFIG_USE_ENV_SPI_BUS=y
CONFIG_ENV_SPI_BUS=1
CONFIG_SYS_RELOC_GD_ENV_ADDR=y
CONFIG_SYS_I2C_S3C24X0=y
# CONFIG_KEYBOARD is not set
CONFIG_SUPPORT_EMMC_BOOT=y
CONFIG_MMC_DW=y
CONFIG_MMC_SDHCI=y

View File

@@ -38,6 +38,7 @@ CONFIG_USE_ENV_SPI_BUS=y
CONFIG_ENV_SPI_BUS=1
CONFIG_SYS_RELOC_GD_ENV_ADDR=y
CONFIG_SYS_I2C_S3C24X0=y
# CONFIG_KEYBOARD is not set
CONFIG_SUPPORT_EMMC_BOOT=y
CONFIG_MMC_DW=y
CONFIG_MMC_SDHCI=y

View File

@@ -13,23 +13,22 @@ CONFIG_MISC_INIT_F=y
# CONFIG_CMD_BOOTM is not set
# CONFIG_CMD_ELF is not set
# CONFIG_CMD_EXTENSION is not set
CONFIG_BOOTP_DNS2=y
# CONFIG_CMD_DATE is not set
CONFIG_OF_CONTROL=y
CONFIG_SYS_RELOC_GD_ENV_ADDR=y
CONFIG_BOOTP_SEND_HOSTNAME=y
CONFIG_IP_DEFRAG=y
# CONFIG_NET is not set
# CONFIG_ACPIGEN is not set
CONFIG_AXI=y
CONFIG_AXI_SANDBOX=y
# CONFIG_UDP_FUNCTION_FASTBOOT is not set
CONFIG_SANDBOX_GPIO=y
CONFIG_PCI=y
CONFIG_PCI_SANDBOX=y
CONFIG_DM_RTC=y
CONFIG_SOUND=y
CONFIG_SYSRESET=y
CONFIG_TIMER=y
CONFIG_I2C_EDID=y
# CONFIG_VIRTIO_MMIO is not set
# CONFIG_VIRTIO_PCI is not set
# CONFIG_VIRTIO_SANDBOX is not set
# CONFIG_EFI_LOADER is not set

View File

@@ -59,6 +59,22 @@ Bloblist provides a fairly simple API which allows blobs to be created and
found. All access is via the blob's tag. Blob records are zeroed when added.
Placing the bloblist
--------------------
The bloblist is typically positioned at a fixed address by TPL, or SPL. This
is controlled by `CONFIG_BLOBLIST_ADDR`. But in some cases it is preferable to
allocate the bloblist in the malloc() space. Use the `CONFIG_BLOBLIST_ALLOC`
option to enable this.
The bloblist is automatically relocated as part of U-Boot relocation. Sometimes
it is useful to expand the bloblist in U-Boot proper, since it may want to add
information for use by Linux. Note that this does not mean that Linux needs to
know anything about the bloblist format, just that it is convenient to use
bloblist to place things contiguously in memory. Set
`CONFIG_BLOBLIST_SIZE_RELOC` to define the expanded size, if needed.
Finishing the bloblist
----------------------

View File

@@ -98,3 +98,11 @@ Deadline: 2021.10
The I2C subsystem has supported the driver model since early 2015.
Maintainers should submit patches switching over to using CONFIG_DM_I2C and
other base driver model options in time for inclusion in the 2021.10 release.
CONFIG_KEYBOARD
---------------
Deadline: 2022.10
This is a legacy option which has been replaced by driver model.
Maintainers should submit patches switching over to using CONFIG_DM_KEYBOARD and
other base driver model options in time for inclusion in the 2022.10 release.

View File

@@ -30,16 +30,11 @@ Sandbox is a build of U-Boot that can run under Linux so it is a convenient
way of trying out tracing before you use it on your actual board. To do
this, follow these steps:
Add the following to include/configs/sandbox.h (if not already there)
Add the following to config/sandbox_defconfig
.. code-block:: c
#define CONFIG_TRACE
#define CONFIG_CMD_TRACE
#define CONFIG_TRACE_BUFFER_SIZE (16 << 20)
#define CONFIG_TRACE_EARLY_SIZE (8 << 20)
#define CONFIG_TRACE_EARLY
#define CONFIG_TRACE_EARLY_ADDR 0x00100000
CONFIG_TRACE=y
Build sandbox U-Boot with tracing enabled:

View File

@@ -45,10 +45,6 @@ struct blk_desc ide_dev_desc[CONFIG_SYS_IDE_MAXDEVICE];
#define IDE_SPIN_UP_TIME_OUT 5000 /* 5 sec spin-up timeout */
#ifndef CONFIG_SYS_ATA_PORT_ADDR
#define CONFIG_SYS_ATA_PORT_ADDR(port) (port)
#endif
#ifdef CONFIG_IDE_RESET
extern void ide_set_reset(int idereset);
@@ -678,8 +674,7 @@ static void ide_ident(struct blk_desc *dev_desc)
__weak void ide_outb(int dev, int port, unsigned char val)
{
debug("ide_outb (dev= %d, port= 0x%x, val= 0x%02x) : @ 0x%08lx\n",
dev, port, val,
(ATA_CURR_BASE(dev) + CONFIG_SYS_ATA_PORT_ADDR(port)));
dev, port, val, ATA_CURR_BASE(dev) + port);
#if defined(CONFIG_IDE_AHB)
if (port) {
@@ -690,7 +685,7 @@ __weak void ide_outb(int dev, int port, unsigned char val)
outb(val, (ATA_CURR_BASE(dev)));
}
#else
outb(val, (ATA_CURR_BASE(dev) + CONFIG_SYS_ATA_PORT_ADDR(port)));
outb(val, ATA_CURR_BASE(dev) + port);
#endif
}
@@ -701,12 +696,11 @@ __weak unsigned char ide_inb(int dev, int port)
#if defined(CONFIG_IDE_AHB)
val = ide_read_register(dev, port);
#else
val = inb((ATA_CURR_BASE(dev) + CONFIG_SYS_ATA_PORT_ADDR(port)));
val = inb(ATA_CURR_BASE(dev) + port);
#endif
debug("ide_inb (dev= %d, port= 0x%x) : @ 0x%08lx -> 0x%02x\n",
dev, port,
(ATA_CURR_BASE(dev) + CONFIG_SYS_ATA_PORT_ADDR(port)), val);
dev, port, ATA_CURR_BASE(dev) + port, val);
return val;
}

View File

@@ -19,11 +19,11 @@
DECLARE_GLOBAL_DATA_PTR;
#ifndef CONFIG_BLK
static struct host_block_dev host_devices[CONFIG_HOST_MAX_DEVICES];
static struct host_block_dev host_devices[SANDBOX_HOST_MAX_DEVICES];
static struct host_block_dev *find_host_device(int dev)
{
if (dev >= 0 && dev < CONFIG_HOST_MAX_DEVICES)
if (dev >= 0 && dev < SANDBOX_HOST_MAX_DEVICES)
return &host_devices[dev];
return NULL;
@@ -259,7 +259,7 @@ U_BOOT_DRIVER(sandbox_host_blk) = {
U_BOOT_LEGACY_BLK(sandbox_host) = {
.if_typename = "host",
.if_type = IF_TYPE_HOST,
.max_devs = CONFIG_HOST_MAX_DEVICES,
.max_devs = SANDBOX_HOST_MAX_DEVICES,
.get_dev = host_get_dev_err,
};
#endif

View File

@@ -58,7 +58,7 @@ static int bind_drivers_pass(struct udevice *parent, bool pre_reloc_only)
const int n_ents = ll_entry_count(struct driver_info, driver_info);
bool missing_parent = false;
int result = 0;
uint idx;
int idx;
/*
* Do one iteration through the driver_info records. For of-platdata,

View File

@@ -38,6 +38,15 @@ config TPL_DM_KEYBOARD
includes methods to start/stop the device, check for available
input and update LEDs if the keyboard has them.
config KEYBOARD
bool "Enable legacy keyboard support (deprecated)"
help
Enable this to enable a custom keyboard support.
This simply calls drv_keyboard_init() which must be
defined in your board-specific files. This option is deprecated
and is only used by novena. For new boards, use driver model
instead.
config CROS_EC_KEYB
bool "Enable Chrome OS EC keyboard support"
depends on INPUT

View File

@@ -113,8 +113,6 @@
*/
#ifdef CONFIG_IDE
#define __io
/* Needs byte-swapping for ATA data register */
#define CONFIG_IDE_SWAP_IO
/* Data, registers and alternate blocks are at the same offset */
#define CONFIG_SYS_ATA_DATA_OFFSET (0x0100)
#define CONFIG_SYS_ATA_REG_OFFSET (0x0100)

View File

@@ -30,7 +30,4 @@
#define LCD_BPP LCD_COLOR16
#endif
/* Enable keyboard */
#define CONFIG_KEYBOARD
#endif

View File

@@ -9,7 +9,6 @@
#define __CONFIG_H
/* System configurations */
#define CONFIG_KEYBOARD
#include "mx6_common.h"

View File

@@ -44,7 +44,6 @@
#define CONFIG_SYS_ATA_DATA_OFFSET 0x1000 /* data reg offset */
#define CONFIG_SYS_ATA_REG_OFFSET 0x1000 /* reg offset */
#define CONFIG_SYS_ATA_ALT_OFFSET 0x800 /* alternate register offset */
#define CONFIG_IDE_SWAP_IO
/*
* SuperH PCI Bridge Configuration

View File

@@ -10,22 +10,10 @@
#define CONFIG_IO_TRACE
#endif
#ifndef CONFIG_TIMER
#define CONFIG_SYS_TIMER_RATE 1000000
#endif
#define CONFIG_HOST_MAX_DEVICES 4
#define CONFIG_MALLOC_F_ADDR 0x0010000
#define CONFIG_SYS_CBSIZE 1024 /* Console I/O Buffer Size */
/* turn on command-line edit/c/auto */
/* SPI - enable all SPI flash types for testing purposes */
#define CONFIG_SYS_FDT_LOAD_ADDR 0x100
#define CONFIG_PHYSMEM
/* Size of our emulated memory */
@@ -48,14 +36,6 @@
#define CONFIG_SANDBOX_SDL
#endif
/* LCD and keyboard require SDL support */
#ifdef CONFIG_SANDBOX_SDL
#define LCD_BPP LCD_COLOR16
#define CONFIG_LCD_BMP_RLE8
#define CONFIG_KEYBOARD
#endif
#ifndef CONFIG_SPL_BUILD
#define CONFIG_SYS_IDE_MAXBUS 1
#define CONFIG_SYS_ATA_IDE0_OFFSET 0

View File

@@ -14,7 +14,6 @@
#undef CONFIG_EXYNOS_FB
#undef CONFIG_EXYNOS_DP
#undef CONFIG_KEYBOARD
#define CONFIG_BOARD_COMMON

View File

@@ -15,8 +15,6 @@
#undef CONFIG_EXYNOS_FB
#undef CONFIG_EXYNOS_DP
#undef CONFIG_KEYBOARD
#define CONFIG_BOARD_COMMON
#define CONFIG_SMDK5420 /* which is in a SMDK5420 */

View File

@@ -13,6 +13,4 @@
#include <configs/rk3288_common.h>
#define CONFIG_KEYBOARD
#endif

View File

@@ -6,6 +6,9 @@
#ifndef __SANDBOX_BLOCK_DEV__
#define __SANDBOX_BLOCK_DEV__
/* Maximum number of host devices - see drivers/block/sandbox.c */
#define SANDBOX_HOST_MAX_DEVICES 4
struct host_block_dev {
#ifndef CONFIG_BLK
struct blk_desc blk_dev;

View File

@@ -440,7 +440,6 @@ CONFIG_HIDE_LOGO_VERSION
CONFIG_HIKEY_GPIO
CONFIG_HITACHI_SX14
CONFIG_HOSTNAME
CONFIG_HOST_MAX_DEVICES
CONFIG_HPS_ALTERAGRP_DBGATCLK
CONFIG_HPS_ALTERAGRP_MAINCLK
CONFIG_HPS_ALTERAGRP_MPUCLK
@@ -606,7 +605,6 @@ CONFIG_ICACHE
CONFIG_ICS307_REFCLK_HZ
CONFIG_IDE_PREINIT
CONFIG_IDE_RESET
CONFIG_IDE_SWAP_IO
CONFIG_IMA
CONFIG_IMX
CONFIG_IMX6_PWM_PER_CLK
@@ -642,7 +640,6 @@ CONFIG_JFFS2_SUMMARY
CONFIG_JRSTARTR_JR0
CONFIG_JTAG_CONSOLE
CONFIG_KEEP_SERVERADDR
CONFIG_KEYBOARD
CONFIG_KEY_REVOCATION
CONFIG_KIRKWOOD_EGIGA_INIT
CONFIG_KIRKWOOD_GPIO
@@ -1231,7 +1228,6 @@ CONFIG_SYS_ATA_BASE_ADDR
CONFIG_SYS_ATA_DATA_OFFSET
CONFIG_SYS_ATA_IDE0_OFFSET
CONFIG_SYS_ATA_IDE1_OFFSET
CONFIG_SYS_ATA_PORT_ADDR
CONFIG_SYS_ATA_REG_OFFSET
CONFIG_SYS_ATA_STRIDE
CONFIG_SYS_ATMEL_CPU_NAME
@@ -1610,7 +1606,6 @@ CONFIG_SYS_FAST_CLK
CONFIG_SYS_FAULT_ECHO_LINK_DOWN
CONFIG_SYS_FAULT_MII_ADDR
CONFIG_SYS_FDT_BASE
CONFIG_SYS_FDT_LOAD_ADDR
CONFIG_SYS_FDT_PAD
CONFIG_SYS_FEC0_IOBASE
CONFIG_SYS_FEC1_IOBASE

View File

@@ -913,6 +913,11 @@ or with wildcards::
u-boot-dtb 180 108 u-boot-dtb 80 3b5
image-header bf8 8 image-header bf8
If an older version of binman is used to list images created by a newer one, it
is possible that it will contain entry types that are not supported. These still
show with the correct type, but binman just sees them as blobs (plain binary
data). Any special features of that etype are not supported by the old binman.
Extracting files from images
----------------------------
@@ -937,12 +942,41 @@ or just a selection::
$ binman extract -i image.bin "*u-boot*" -O outdir
Some entry types have alternative formats, for example fdtmap which allows
extracting just the devicetree binary without the fdtmap header::
$ binman extract -i /tmp/b/odroid-c4/image.bin -f out.dtb -F fdt fdtmap
$ fdtdump out.dtb
/dts-v1/;
// magic: 0xd00dfeed
// totalsize: 0x8ab (2219)
// off_dt_struct: 0x38
// off_dt_strings: 0x82c
// off_mem_rsvmap: 0x28
// version: 17
// last_comp_version: 2
// boot_cpuid_phys: 0x0
// size_dt_strings: 0x7f
// size_dt_struct: 0x7f4
/ {
image-node = "binman";
image-pos = <0x00000000>;
size = <0x0011162b>;
...
Use `-F list` to see what alternative formats are available::
$ binman extract -i /tmp/b/odroid-c4/image.bin -F list
Flag (-F) Entry type Description
fdt fdtmap Extract the devicetree blob from the fdtmap
Replacing files in an image
---------------------------
You can replace files in an existing firmware image created by binman, provided
that there is an 'fdtmap' entry in the image. For example:
that there is an 'fdtmap' entry in the image. For example::
$ binman replace -i image.bin section/cbfs/u-boot
@@ -1081,6 +1115,35 @@ the tool's output will be used for the target or for the host machine. If those
aren't given, it will also try to derive target-specific versions from the
CROSS_COMPILE environment variable during a cross-compilation.
If the tool is not available in the path you can use BINMAN_TOOLPATHS to specify
a space-separated list of paths to search, e.g.::
BINMAN_TOOLPATHS="/tools/g12a /tools/tegra" binman ...
External blobs
--------------
Binary blobs, even if the source code is available, complicate building
firmware. The instructions can involve multiple steps and the binaries may be
hard to build or obtain. Binman at least provides a unified description of how
to build the final image, no matter what steps are needed to get there.
Binman also provides a `blob-ext` entry type that pulls in a binary blob from an
external file. If the file is missing, binman can optionally complete the build
and just report a warning. Use the `-M/--allow-missing` option to enable this.
This is useful in CI systems which want to check that everything is correct but
don't have access to the blobs.
If the blobs are in a different directory, you can specify this with the `-I`
option.
For U-Boot, you can set the BINMAN_INDIRS environment variable to provide a
space-separated list of directories to search for binary blobs::
BINMAN_INDIRS="odroid-c4/fip/g12a \
odroid-c4/build/board/hardkernel/odroidc4/firmware \
odroid-c4/build/scp_task" binman ...
Code coverage
-------------

View File

@@ -2,18 +2,68 @@
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Command-line parser for binman
#
"""Command-line parser for binman"""
import argparse
from argparse import ArgumentParser
import state
def make_extract_parser(subparsers):
"""make_extract_parser: Make a subparser for the 'extract' command
Args:
subparsers (ArgumentParser): parser to add this one to
"""
extract_parser = subparsers.add_parser('extract',
help='Extract files from an image')
extract_parser.add_argument('-F', '--format', type=str,
help='Select an alternative format for extracted data')
extract_parser.add_argument('-i', '--image', type=str, required=True,
help='Image filename to extract')
extract_parser.add_argument('-f', '--filename', type=str,
help='Output filename to write to')
extract_parser.add_argument('-O', '--outdir', type=str, default='',
help='Path to directory to use for output files')
extract_parser.add_argument('paths', type=str, nargs='*',
help='Paths within file to extract (wildcard)')
extract_parser.add_argument('-U', '--uncompressed', action='store_true',
help='Output raw uncompressed data for compressed entries')
#pylint: disable=R0903
class BinmanVersion(argparse.Action):
"""Handles the -V option to binman
This reads the version information from a file called 'version' in the same
directory as this file.
If not present it assumes this is running from the U-Boot tree and collects
the version from the Makefile.
The format of the version information is three VAR = VALUE lines, for
example:
VERSION = 2022
PATCHLEVEL = 01
EXTRAVERSION = -rc2
"""
def __init__(self, nargs=0, **kwargs):
super().__init__(nargs=nargs, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
parser._print_message(f'Binman {state.GetVersion()}\n')
parser.exit()
def ParseArgs(argv):
"""Parse the binman command-line arguments
Args:
argv: List of string arguments
argv (list of str): List of string arguments
Returns:
Tuple (options, args) with the command-line options and arguments.
tuple: (options, args) with the command-line options and arguments.
options provides access to the options (e.g. option.debug)
args is a list of string arguments
"""
@@ -39,6 +89,7 @@ controlled by a description in the board device tree.'''
parser.add_argument('-v', '--verbosity', default=1,
type=int, help='Control verbosity: 0=silent, 1=warnings, 2=notices, '
'3=info, 4=detail, 5=debug')
parser.add_argument('-V', '--version', nargs=0, action=BinmanVersion)
subparsers = parser.add_subparsers(dest='cmd')
subparsers.required = True
@@ -74,8 +125,8 @@ controlled by a description in the board device tree.'''
build_parser.add_argument('--update-fdt-in-elf', type=str,
help='Update an ELF file with the output dtb: infile,outfile,begin_sym,end_sym')
entry_parser = subparsers.add_parser('entry-docs',
help='Write out entry documentation (see entries.rst)')
subparsers.add_parser(
'entry-docs', help='Write out entry documentation (see entries.rst)')
list_parser = subparsers.add_parser('ls', help='List files in an image')
list_parser.add_argument('-i', '--image', type=str, required=True,
@@ -83,18 +134,7 @@ controlled by a description in the board device tree.'''
list_parser.add_argument('paths', type=str, nargs='*',
help='Paths within file to list (wildcard)')
extract_parser = subparsers.add_parser('extract',
help='Extract files from an image')
extract_parser.add_argument('-i', '--image', type=str, required=True,
help='Image filename to extract')
extract_parser.add_argument('-f', '--filename', type=str,
help='Output filename to write to')
extract_parser.add_argument('-O', '--outdir', type=str, default='',
help='Path to directory to use for output files')
extract_parser.add_argument('paths', type=str, nargs='*',
help='Paths within file to extract (wildcard)')
extract_parser.add_argument('-U', '--uncompressed', action='store_true',
help='Output raw uncompressed data for compressed entries')
make_extract_parser(subparsers)
replace_parser = subparsers.add_parser('replace',
help='Replace entries in an image')

View File

@@ -200,8 +200,24 @@ def ReadEntry(image_fname, entry_path, decomp=True):
return entry.ReadData(decomp)
def ShowAltFormats(image):
"""Show alternative formats available for entries in the image
This shows a list of formats available.
Args:
image (Image): Image to check
"""
alt_formats = {}
image.CheckAltFormats(alt_formats)
print('%-10s %-20s %s' % ('Flag (-F)', 'Entry type', 'Description'))
for name, val in alt_formats.items():
entry, helptext = val
print('%-10s %-20s %s' % (name, entry.etype, helptext))
def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
decomp=True):
decomp=True, alt_format=None):
"""Extract the data from one or more entries and write it to files
Args:
@@ -217,6 +233,10 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
"""
image = Image.FromFile(image_fname)
if alt_format == 'list':
ShowAltFormats(image)
return
# Output an entry to a single file, as a special case
if output_fname:
if not entry_paths:
@@ -224,7 +244,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
if len(entry_paths) != 1:
raise ValueError('Must specify exactly one entry path to write with -f')
entry = image.FindEntryPath(entry_paths[0])
data = entry.ReadData(decomp)
data = entry.ReadData(decomp, alt_format)
tools.WriteFile(output_fname, data)
tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
return
@@ -236,7 +256,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
tout.Notice('%d entries match and will be written' % len(einfos))
for einfo in einfos:
entry = einfo.entry
data = entry.ReadData(decomp)
data = entry.ReadData(decomp, alt_format)
path = entry.GetPath()[1:]
fname = os.path.join(outdir, path)
@@ -584,7 +604,7 @@ def Binman(args):
if args.cmd == 'extract':
ExtractEntries(args.image, args.filename, args.outdir, args.paths,
not args.uncompressed)
not args.uncompressed, args.format)
if args.cmd == 'replace':
ReplaceEntries(args.image, args.filename, args.indir, args.paths,

View File

@@ -69,6 +69,20 @@ See 'blob' for Properties / Entry arguments.
Entry: blob-ext-list: List of externally built binary blobs
-----------------------------------------------------------
This is like blob-ext except that a number of blobs can be provided,
typically with some sort of relationship, e.g. all are DDC parameters.
If any of the external files needed by this list is missing, binman can
optionally ignore it and produce a broken image with a warning.
Args:
filenames: List of filenames to read and include
Entry: blob-named-by-arg: A blob entry which gets its filename property from its subclass
-----------------------------------------------------------------------------------------
@@ -314,6 +328,10 @@ Example output for a simple image with U-Boot and an FDT map::
If allow-repack is used then 'orig-offset' and 'orig-size' properties are
added as necessary. See the binman README.
When extracting files, an alternative 'fdt' format is available for fdtmaps.
Use `binman extract -F fdt ...` to use this. It will export a devicetree,
without the fdtmap header, so it can be viewed with `fdtdump`.
Entry: files: A set of files arranged in a section
@@ -799,39 +817,135 @@ This entry holds firmware for an external platform-specific coprocessor.
Entry: section: Entry that contains other entries
-------------------------------------------------
Properties / Entry arguments: (see binman README for more information):
pad-byte: Pad byte to use when padding
sort-by-offset: True if entries should be sorted by offset, False if
they must be in-order in the device tree description
A section is an entry which can contain other entries, thus allowing
hierarchical images to be created. See 'Sections and hierarchical images'
in the binman README for more information.
end-at-4gb: Used to build an x86 ROM which ends at 4GB (2^32)
The base implementation simply joins the various entries together, using
various rules about alignment, etc.
skip-at-start: Number of bytes before the first entry starts. These
effectively adjust the starting offset of entries. For example,
if this is 16, then the first entry would start at 16. An entry
with offset = 20 would in fact be written at offset 4 in the image
file, since the first 16 bytes are skipped when writing.
name-prefix: Adds a prefix to the name of every entry in the section
when writing out the map
align_default: Default alignment for this section, if no alignment is
given in the entry
Subclassing
~~~~~~~~~~~
Properties:
allow_missing: True if this section permits external blobs to be
missing their contents. The second will produce an image but of
course it will not work.
This class can be subclassed to support other file formats which hold
multiple entries, such as CBFS. To do this, override the following
functions. The documentation here describes what your function should do.
For example code, see etypes which subclass `Entry_section`, or `cbfs.py`
for a more involved example::
Properties:
_allow_missing: True if this section permits external blobs to be
missing their contents. The second will produce an image but of
course it will not work.
$ grep -l \(Entry_section tools/binman/etype/*.py
ReadNode()
Call `super().ReadNode()`, then read any special properties for the
section. Then call `self.ReadEntries()` to read the entries.
Binman calls this at the start when reading the image description.
ReadEntries()
Read in the subnodes of the section. This may involve creating entries
of a particular etype automatically, as well as reading any special
properties in the entries. For each entry, entry.ReadNode() should be
called, to read the basic entry properties. The properties should be
added to `self._entries[]`, in the correct order, with a suitable name.
Binman calls this at the start when reading the image description.
BuildSectionData(required)
Create the custom file format that you want and return it as bytes.
This likely sets up a file header, then loops through the entries,
adding them to the file. For each entry, call `entry.GetData()` to
obtain the data. If that returns None, and `required` is False, then
this method must give up and return None. But if `required` is True then
it should assume that all data is valid.
Binman calls this when packing the image, to find out the size of
everything. It is called again at the end when building the final image.
SetImagePos(image_pos):
Call `super().SetImagePos(image_pos)`, then set the `image_pos` values
for each of the entries. This should use the custom file format to find
the `start offset` (and `image_pos`) of each entry. If the file format
uses compression in such a way that there is no offset available (other
than reading the whole file and decompressing it), then the offsets for
affected entries can remain unset (`None`). The size should also be set
if possible.
Binman calls this after the image has been packed, to update the
location that all the entries ended up at.
ReadChildData(child, decomp, alt_format):
The default version of this may be good enough, if you are able to
implement SetImagePos() correctly. But that is a bit of a bypass, so
you can override this method to read from your custom file format. It
should read the entire entry containing the custom file using
`super().ReadData(True)`, then parse the file to get the data for the
given child, then return that data.
If your file format supports compression, the `decomp` argument tells
you whether to return the compressed data (`decomp` is False) or to
uncompress it first, then return the uncompressed data (`decomp` is
True). This is used by the `binman extract -U` option.
If your entry supports alternative formats, the alt_format provides the
alternative format that the user has selected. Your function should
return data in that format. This is used by the 'binman extract -l'
option.
Binman calls this when reading in an image, in order to populate all the
entries with the data from that image (`binman ls`).
WriteChildData(child):
Binman calls this after `child.data` is updated, to inform the custom
file format about this, in case it needs to do updates.
The default version of this does nothing and probably needs to be
overridden for the 'binman replace' command to work. Your version should
use `child.data` to update the data for that child in the custom file
format.
Binman calls this when updating an image that has been read in and in
particular to update the data for a particular entry (`binman replace`)
Properties / Entry arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See :ref:`develop/package/binman:Image description format` for more
information.
align-default
Default alignment for this section, if no alignment is given in the
entry
pad-byte
Pad byte to use when padding
sort-by-offset
True if entries should be sorted by offset, False if they must be
in-order in the device tree description
end-at-4gb
Used to build an x86 ROM which ends at 4GB (2^32)
name-prefix
Adds a prefix to the name of every entry in the section when writing out
the map
skip-at-start
Number of bytes before the first entry starts. These effectively adjust
the starting offset of entries. For example, if this is 16, then the
first entry would start at 16. An entry with offset = 20 would in fact
be written at offset 4 in the image file, since the first 16 bytes are
skipped when writing.
Since a section is also an entry, it inherits all the properties of entries
too.
A section is an entry which can contain other entries, thus allowing
hierarchical images to be created. See 'Sections and hierarchical images'
in the binman README for more information.
Note that the `allow_missing` member controls whether this section permits
external blobs to be missing their contents. The option will produce an
image but of course it will not work. It is useful to make sure that
Continuous Integration systems can build without the binaries being
available. This is set by the `SetAllowMissing()` method, if
`--allow-missing` is passed to binman.

View File

@@ -95,14 +95,14 @@ class Entry(object):
self.pad_after = 0
self.offset_unset = False
self.image_pos = None
self._expand_size = False
self.expand_size = False
self.compress = 'none'
self.missing = False
self.external = False
self.allow_missing = False
@staticmethod
def Lookup(node_path, etype, expanded):
def FindEntryClass(etype, expanded):
"""Look up the entry class for a node.
Args:
@@ -113,10 +113,9 @@ class Entry(object):
Returns:
The entry class object if found, else None if not found and expanded
is True
Raise:
ValueError if expanded is False and the class is not found
is True, else a tuple:
module name that could not be found
exception received
"""
# Convert something like 'u-boot@0' to 'u_boot' since we are only
# interested in the type.
@@ -137,30 +136,66 @@ class Entry(object):
except ImportError as e:
if expanded:
return None
raise ValueError("Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
(etype, node_path, module_name, e))
return module_name, e
modules[module_name] = module
# Look up the expected class name
return getattr(module, 'Entry_%s' % module_name)
@staticmethod
def Create(section, node, etype=None, expanded=False):
def Lookup(node_path, etype, expanded, missing_etype=False):
"""Look up the entry class for a node.
Args:
node_path (str): Path name of Node object containing information
about the entry to create (used for errors)
etype (str): Entry type to use
expanded (bool): Use the expanded version of etype
missing_etype (bool): True to default to a blob etype if the
requested etype is not found
Returns:
The entry class object if found, else None if not found and expanded
is True
Raise:
ValueError if expanded is False and the class is not found
"""
# Convert something like 'u-boot@0' to 'u_boot' since we are only
# interested in the type.
cls = Entry.FindEntryClass(etype, expanded)
if cls is None:
return None
elif isinstance(cls, tuple):
if missing_etype:
cls = Entry.FindEntryClass('blob', False)
if isinstance(cls, tuple): # This should not fail
module_name, e = cls
raise ValueError(
"Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s'" %
(etype, node_path, module_name, e))
return cls
@staticmethod
def Create(section, node, etype=None, expanded=False, missing_etype=False):
"""Create a new entry for a node.
Args:
section: Section object containing this node
node: Node object containing information about the entry to
create
etype: Entry type to use, or None to work it out (used for tests)
expanded: True to use expanded versions of entries, where available
section (entry_Section): Section object containing this node
node (Node): Node object containing information about the entry to
create
etype (str): Entry type to use, or None to work it out (used for
tests)
expanded (bool): Use the expanded version of etype
missing_etype (bool): True to default to a blob etype if the
requested etype is not found
Returns:
A new Entry object of the correct type (a subclass of Entry)
"""
if not etype:
etype = fdt_util.GetString(node, 'type', node.name)
obj = Entry.Lookup(node.path, etype, expanded)
obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
if obj and expanded:
# Check whether to use the expanded entry
new_etype = etype + '-expanded'
@@ -170,7 +205,7 @@ class Entry(object):
else:
obj = None
if not obj:
obj = Entry.Lookup(node.path, etype, False)
obj = Entry.Lookup(node.path, etype, False, missing_etype)
# Call its constructor to get the object we want.
return obj(section, etype, node)
@@ -780,7 +815,7 @@ features to produce new behaviours.
self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
self.image_pos, self.uncomp_size, self.offset, self)
def ReadData(self, decomp=True):
def ReadData(self, decomp=True, alt_format=None):
"""Read the data for an entry from the image
This is used when the image has been read in and we want to extract the
@@ -797,19 +832,20 @@ features to produce new behaviours.
# although compressed sections are currently not supported
tout.Debug("ReadChildData section '%s', entry '%s'" %
(self.section.GetPath(), self.GetPath()))
data = self.section.ReadChildData(self, decomp)
data = self.section.ReadChildData(self, decomp, alt_format)
return data
def ReadChildData(self, child, decomp=True):
def ReadChildData(self, child, decomp=True, alt_format=None):
"""Read the data for a particular child entry
This reads data from the parent and extracts the piece that relates to
the given child.
Args:
child: Child entry to read data for (must be valid)
decomp: True to decompress any compressed data before returning it;
False to return the raw, uncompressed data
child (Entry): Child entry to read data for (must be valid)
decomp (bool): True to decompress any compressed data before
returning it; False to return the raw, uncompressed data
alt_format (str): Alternative format to read in, or None
Returns:
Data for the child (bytes)
@@ -822,6 +858,20 @@ features to produce new behaviours.
self.ProcessContentsUpdate(data)
self.Detail('Loaded data size %x' % len(data))
def GetAltFormat(self, data, alt_format):
"""Read the data for an entry in an alternative format
Supported formats are listed in the documentation for each entry. An
example is fdtmap, which provides an 'fdt' format that extracts the
devicetree blob from the fdtmap.
Args:
data (bytes): Data to convert (this should have been produced by the
entry)
alt_format (str): Format to use
"""
pass
def GetImage(self):
"""Get the image containing this entry
@@ -860,7 +910,8 @@ features to produce new behaviours.
"""Handle writing the data in a child entry
This should be called on the child's parent section after the child's
data has been updated. It
data has been updated. It should update any data structures needed to
validate that the update is successful.
This base-class implementation does nothing, since the base Entry object
does not have any children.
@@ -870,7 +921,7 @@ features to produce new behaviours.
Returns:
True if the section could be updated successfully, False if the
data is such that the section could not updat
data is such that the section could not update
"""
return True
@@ -961,3 +1012,13 @@ features to produce new behaviours.
tout.Info("Node '%s': etype '%s': %s selected" %
(node.path, etype, new_etype))
return True
def CheckAltFormats(self, alt_formats):
"""Add any alternative formats supported by this entry type
Args:
alt_formats (dict): Dict to add alt_formats to:
key: Name of alt format
value: Help text
"""
pass

View File

@@ -10,6 +10,7 @@ import sys
import unittest
from binman import entry
from binman.etype.blob import Entry_blob
from dtoc import fdt
from dtoc import fdt_util
from patman import tools
@@ -100,5 +101,13 @@ class TestEntry(unittest.TestCase):
self.assertIn("Unknown entry type 'missing' in node '/binman/u-boot'",
str(e.exception))
def testMissingEtype(self):
"""Test use of a blob etype when the requested one is not available"""
ent = entry.Entry.Create(None, self.GetNode(), 'missing',
missing_etype=True)
self.assertTrue(isinstance(ent, Entry_blob))
self.assertEquals('missing', ent.etype)
if __name__ == "__main__":
unittest.main()

View File

@@ -48,10 +48,10 @@ class Entry_blob(Entry):
self.ReadBlobContents()
return True
def ReadBlobContents(self):
def ReadFileContents(self, pathname):
"""Read blob contents into memory
This function compresses the data before storing if needed.
This function compresses the data before returning if needed.
We assume the data is small enough to fit into memory. If this
is used for large filesystem image that might not be true.
@@ -59,13 +59,23 @@ class Entry_blob(Entry):
new Entry method which can read in chunks. Then we could copy
the data in chunks and avoid reading it all at once. For now
this seems like an unnecessary complication.
Args:
pathname (str): Pathname to read from
Returns:
bytes: Data read
"""
state.TimingStart('read')
indata = tools.ReadFile(self._pathname)
indata = tools.ReadFile(pathname)
state.TimingAccum('read')
state.TimingStart('compress')
data = self.CompressData(indata)
state.TimingAccum('compress')
return data
def ReadBlobContents(self):
data = self.ReadFileContents(self._pathname)
self.SetContents(data)
return True

View File

@@ -0,0 +1,58 @@
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Entry-type module for a list of external blobs, not built by U-Boot
#
import os
from binman.etype.blob import Entry_blob
from dtoc import fdt_util
from patman import tools
from patman import tout
class Entry_blob_ext_list(Entry_blob):
"""List of externally built binary blobs
This is like blob-ext except that a number of blobs can be provided,
typically with some sort of relationship, e.g. all are DDC parameters.
If any of the external files needed by this list is missing, binman can
optionally ignore it and produce a broken image with a warning.
Args:
filenames: List of filenames to read and include
"""
def __init__(self, section, etype, node):
Entry_blob.__init__(self, section, etype, node)
self.external = True
def ReadNode(self):
super().ReadNode()
self._filenames = fdt_util.GetStringList(self._node, 'filenames')
self._pathnames = []
def ObtainContents(self):
missing = False
pathnames = []
for fname in self._filenames:
pathname = tools.GetInputFilename(
fname, self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not pathname:
missing = True
pathnames.append(pathname)
self._pathnames = pathnames
if missing:
self.SetContents(b'')
self.missing = True
return True
data = bytearray()
for pathname in pathnames:
data += self.ReadFileContents(pathname)
self.SetContents(data)
return True

View File

@@ -48,4 +48,4 @@ class Entry_blob_phase(Entry_section):
subnode = state.AddSubnode(self._node, name)
# Read entries again, now that we have some
self._ReadEntries()
self.ReadEntries()

View File

@@ -168,43 +168,17 @@ class Entry_cbfs(Entry):
from binman import state
super().__init__(section, etype, node)
self._cbfs_arg = fdt_util.GetString(node, 'cbfs-arch', 'x86')
self.align_default = None
self._cbfs_entries = OrderedDict()
self._ReadSubnodes()
self._entries = OrderedDict()
self.reader = None
def ObtainContents(self, skip=None):
arch = cbfs_util.find_arch(self._cbfs_arg)
if arch is None:
self.Raise("Invalid architecture '%s'" % self._cbfs_arg)
if self.size is None:
self.Raise("'cbfs' entry must have a size property")
cbfs = CbfsWriter(self.size, arch)
for entry in self._cbfs_entries.values():
# First get the input data and put it in a file. If not available,
# try later.
if entry != skip and not entry.ObtainContents():
return False
data = entry.GetData()
cfile = None
if entry._type == 'raw':
cfile = cbfs.add_file_raw(entry._cbfs_name, data,
entry._cbfs_offset,
entry._cbfs_compress)
elif entry._type == 'stage':
cfile = cbfs.add_file_stage(entry._cbfs_name, data,
entry._cbfs_offset)
else:
entry.Raise("Unknown cbfs-type '%s' (use 'raw', 'stage')" %
entry._type)
if cfile:
entry._cbfs_file = cfile
data = cbfs.get_data()
self.SetContents(data)
return True
def ReadNode(self):
"""Read properties from the atf-fip node"""
super().ReadNode()
self._cbfs_arg = fdt_util.GetString(self._node, 'cbfs-arch', 'x86')
self.ReadEntries()
def _ReadSubnodes(self):
def ReadEntries(self):
"""Read the subnodes to find out what should go in this CBFS"""
for node in self._node.subnodes:
entry = Entry.Create(self, node)
@@ -217,7 +191,41 @@ class Entry_cbfs(Entry):
if entry._cbfs_compress is None:
self.Raise("Invalid compression in '%s': '%s'" %
(node.name, compress))
self._cbfs_entries[entry._cbfs_name] = entry
self._entries[entry._cbfs_name] = entry
def ObtainCfile(self, cbfs, entry):
# First get the input data and put it in a file. If not available,
# try later.
data = entry.GetData()
cfile = None
if entry._type == 'raw':
cfile = cbfs.add_file_raw(entry._cbfs_name, data,
entry._cbfs_offset,
entry._cbfs_compress)
elif entry._type == 'stage':
cfile = cbfs.add_file_stage(entry._cbfs_name, data,
entry._cbfs_offset)
else:
entry.Raise("Unknown cbfs-type '%s' (use 'raw', 'stage')" %
entry._type)
return cfile
def ObtainContents(self, skip_entry=None):
arch = cbfs_util.find_arch(self._cbfs_arg)
if arch is None:
self.Raise("Invalid architecture '%s'" % self._cbfs_arg)
if self.size is None:
self.Raise("'cbfs' entry must have a size property")
cbfs = CbfsWriter(self.size, arch)
for entry in self._entries.values():
if entry != skip_entry and not entry.ObtainContents():
return False
cfile = self.ObtainCfile(cbfs, entry)
if cfile:
entry._cbfs_file = cfile
data = cbfs.get_data()
self.SetContents(data)
return True
def SetImagePos(self, image_pos):
"""Override this function to set all the entry properties from CBFS
@@ -230,7 +238,7 @@ class Entry_cbfs(Entry):
super().SetImagePos(image_pos)
# Now update the entries with info from the CBFS entries
for entry in self._cbfs_entries.values():
for entry in self._entries.values():
cfile = entry._cbfs_file
entry.size = cfile.data_len
entry.offset = cfile.calced_cbfs_offset
@@ -240,7 +248,7 @@ class Entry_cbfs(Entry):
def AddMissingProperties(self, have_image_pos):
super().AddMissingProperties(have_image_pos)
for entry in self._cbfs_entries.values():
for entry in self._entries.values():
entry.AddMissingProperties(have_image_pos)
if entry._cbfs_compress:
state.AddZeroProp(entry._node, 'uncomp-size')
@@ -252,7 +260,7 @@ class Entry_cbfs(Entry):
def SetCalculatedProperties(self):
"""Set the value of device-tree properties calculated by binman"""
super().SetCalculatedProperties()
for entry in self._cbfs_entries.values():
for entry in self._entries.values():
state.SetInt(entry._node, 'offset', entry.offset)
state.SetInt(entry._node, 'size', entry.size)
state.SetInt(entry._node, 'image-pos', entry.image_pos)
@@ -262,24 +270,26 @@ class Entry_cbfs(Entry):
def ListEntries(self, entries, indent):
"""Override this method to list all files in the section"""
super().ListEntries(entries, indent)
for entry in self._cbfs_entries.values():
for entry in self._entries.values():
entry.ListEntries(entries, indent + 1)
def GetEntries(self):
return self._cbfs_entries
return self._entries
def ReadData(self, decomp=True):
data = super().ReadData(True)
def ReadData(self, decomp=True, alt_format=None):
data = super().ReadData(True, alt_format)
return data
def ReadChildData(self, child, decomp=True):
def ReadChildData(self, child, decomp=True, alt_format=None):
if not self.reader:
data = super().ReadData(True)
data = super().ReadData(True, alt_format)
self.reader = cbfs_util.CbfsReader(data)
reader = self.reader
cfile = reader.files.get(child.name)
return cfile.data if decomp else cfile.orig_data
def WriteChildData(self, child):
self.ObtainContents(skip=child)
# Recreate the data structure, leaving the data for this child alone,
# so that child.data is used to pack into the FIP.
self.ObtainContents(skip_entry=child)
return True

View File

@@ -74,6 +74,10 @@ class Entry_fdtmap(Entry):
If allow-repack is used then 'orig-offset' and 'orig-size' properties are
added as necessary. See the binman README.
When extracting files, an alternative 'fdt' format is available for fdtmaps.
Use `binman extract -F fdt ...` to use this. It will export a devicetree,
without the fdtmap header, so it can be viewed with `fdtdump`.
"""
def __init__(self, section, etype, node):
# Put these here to allow entry-docs and help to work without libfdt
@@ -86,6 +90,10 @@ class Entry_fdtmap(Entry):
from dtoc.fdt import Fdt
super().__init__(section, etype, node)
self.alt_formats = ['fdt']
def CheckAltFormats(self, alt_formats):
alt_formats['fdt'] = self, 'Extract the devicetree blob from the fdtmap'
def _GetFdtmap(self):
"""Build an FDT map from the entries in the current image
@@ -147,3 +155,7 @@ class Entry_fdtmap(Entry):
processing, e.g. the image-pos properties.
"""
return self.ProcessContentsUpdate(self._GetFdtmap())
def GetAltFormat(self, data, alt_format):
if alt_format == 'fdt':
return data[FDTMAP_HDR_LEN:]

View File

@@ -64,4 +64,4 @@ class Entry_files(Entry_section):
state.AddInt(subnode, 'align', self._files_align)
# Read entries again, now that we have some
self._ReadEntries()
self.ReadEntries()

View File

@@ -136,10 +136,10 @@ class Entry_fit(Entry):
str)])[0]
def ReadNode(self):
self._ReadSubnodes()
self.ReadEntries()
super().ReadNode()
def _ReadSubnodes(self):
def ReadEntries(self):
def _AddNode(base_node, depth, node):
"""Add a node to the FIT

View File

@@ -50,7 +50,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
self._ifwi_entries = OrderedDict()
def ReadNode(self):
self._ReadSubnodes()
self.ReadEntries()
super().ReadNode()
def _BuildIfwi(self):
@@ -117,7 +117,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
same = orig_data == self.data
return same
def _ReadSubnodes(self):
def ReadEntries(self):
"""Read the subnodes to find out what should go in this IFWI"""
for node in self._node.subnodes:
entry = Entry.Create(self.section, node)

View File

@@ -37,7 +37,7 @@ class Entry_mkimage(Entry):
self._args = fdt_util.GetString(self._node, 'args').split(' ')
self._mkimage_entries = OrderedDict()
self.align_default = None
self._ReadSubnodes()
self.ReadEntries()
def ObtainContents(self):
data = b''
@@ -55,7 +55,7 @@ class Entry_mkimage(Entry):
self.SetContents(tools.ReadFile(output_fname))
return True
def _ReadSubnodes(self):
def ReadEntries(self):
"""Read the subnodes to find out what should go in this image"""
for node in self._node.subnodes:
entry = Entry.Create(self, node)

View File

@@ -24,34 +24,135 @@ from patman.tools import ToHexSize
class Entry_section(Entry):
"""Entry that contains other entries
Properties / Entry arguments: (see binman README for more information):
pad-byte: Pad byte to use when padding
sort-by-offset: True if entries should be sorted by offset, False if
they must be in-order in the device tree description
A section is an entry which can contain other entries, thus allowing
hierarchical images to be created. See 'Sections and hierarchical images'
in the binman README for more information.
end-at-4gb: Used to build an x86 ROM which ends at 4GB (2^32)
The base implementation simply joins the various entries together, using
various rules about alignment, etc.
skip-at-start: Number of bytes before the first entry starts. These
effectively adjust the starting offset of entries. For example,
if this is 16, then the first entry would start at 16. An entry
with offset = 20 would in fact be written at offset 4 in the image
file, since the first 16 bytes are skipped when writing.
name-prefix: Adds a prefix to the name of every entry in the section
when writing out the map
align_default: Default alignment for this section, if no alignment is
given in the entry
Subclassing
~~~~~~~~~~~
Properties:
allow_missing: True if this section permits external blobs to be
missing their contents. The second will produce an image but of
course it will not work.
This class can be subclassed to support other file formats which hold
multiple entries, such as CBFS. To do this, override the following
functions. The documentation here describes what your function should do.
For example code, see etypes which subclass `Entry_section`, or `cbfs.py`
for a more involved example::
$ grep -l \(Entry_section tools/binman/etype/*.py
ReadNode()
Call `super().ReadNode()`, then read any special properties for the
section. Then call `self.ReadEntries()` to read the entries.
Binman calls this at the start when reading the image description.
ReadEntries()
Read in the subnodes of the section. This may involve creating entries
of a particular etype automatically, as well as reading any special
properties in the entries. For each entry, entry.ReadNode() should be
called, to read the basic entry properties. The properties should be
added to `self._entries[]`, in the correct order, with a suitable name.
Binman calls this at the start when reading the image description.
BuildSectionData(required)
Create the custom file format that you want and return it as bytes.
This likely sets up a file header, then loops through the entries,
adding them to the file. For each entry, call `entry.GetData()` to
obtain the data. If that returns None, and `required` is False, then
this method must give up and return None. But if `required` is True then
it should assume that all data is valid.
Binman calls this when packing the image, to find out the size of
everything. It is called again at the end when building the final image.
SetImagePos(image_pos):
Call `super().SetImagePos(image_pos)`, then set the `image_pos` values
for each of the entries. This should use the custom file format to find
the `start offset` (and `image_pos`) of each entry. If the file format
uses compression in such a way that there is no offset available (other
than reading the whole file and decompressing it), then the offsets for
affected entries can remain unset (`None`). The size should also be set
if possible.
Binman calls this after the image has been packed, to update the
location that all the entries ended up at.
ReadChildData(child, decomp, alt_format):
The default version of this may be good enough, if you are able to
implement SetImagePos() correctly. But that is a bit of a bypass, so
you can override this method to read from your custom file format. It
should read the entire entry containing the custom file using
`super().ReadData(True)`, then parse the file to get the data for the
given child, then return that data.
If your file format supports compression, the `decomp` argument tells
you whether to return the compressed data (`decomp` is False) or to
uncompress it first, then return the uncompressed data (`decomp` is
True). This is used by the `binman extract -U` option.
If your entry supports alternative formats, the alt_format provides the
alternative format that the user has selected. Your function should
return data in that format. This is used by the 'binman extract -l'
option.
Binman calls this when reading in an image, in order to populate all the
entries with the data from that image (`binman ls`).
WriteChildData(child):
Binman calls this after `child.data` is updated, to inform the custom
file format about this, in case it needs to do updates.
The default version of this does nothing and probably needs to be
overridden for the 'binman replace' command to work. Your version should
use `child.data` to update the data for that child in the custom file
format.
Binman calls this when updating an image that has been read in and in
particular to update the data for a particular entry (`binman replace`)
Properties / Entry arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See :ref:`develop/package/binman:Image description format` for more
information.
align-default
Default alignment for this section, if no alignment is given in the
entry
pad-byte
Pad byte to use when padding
sort-by-offset
True if entries should be sorted by offset, False if they must be
in-order in the device tree description
end-at-4gb
Used to build an x86 ROM which ends at 4GB (2^32)
name-prefix
Adds a prefix to the name of every entry in the section when writing out
the map
skip-at-start
Number of bytes before the first entry starts. These effectively adjust
the starting offset of entries. For example, if this is 16, then the
first entry would start at 16. An entry with offset = 20 would in fact
be written at offset 4 in the image file, since the first 16 bytes are
skipped when writing.
Since a section is also an entry, it inherits all the properties of entries
too.
A section is an entry which can contain other entries, thus allowing
hierarchical images to be created. See 'Sections and hierarchical images'
in the binman README for more information.
Note that the `allow_missing` member controls whether this section permits
external blobs to be missing their contents. The option will produce an
image but of course it will not work. It is useful to make sure that
Continuous Integration systems can build without the binaries being
available. This is set by the `SetAllowMissing()` method, if
`--allow-missing` is passed to binman.
"""
def __init__(self, section, etype, node, test=False):
if not test:
@@ -81,18 +182,16 @@ class Entry_section(Entry):
self._skip_at_start = 0
self._name_prefix = fdt_util.GetString(self._node, 'name-prefix')
self.align_default = fdt_util.GetInt(self._node, 'align-default', 0)
filename = fdt_util.GetString(self._node, 'filename')
if filename:
self._filename = filename
self._ReadEntries()
self.ReadEntries()
def _ReadEntries(self):
def ReadEntries(self):
for node in self._node.subnodes:
if node.name.startswith('hash') or node.name.startswith('signature'):
continue
entry = Entry.Create(self, node,
expanded=self.GetImage().use_expanded)
expanded=self.GetImage().use_expanded,
missing_etype=self.GetImage().missing_etype)
entry.ReadNode()
entry.SetPrefix(self._name_prefix)
self._entries[node.name] = entry
@@ -101,9 +200,9 @@ class Entry_section(Entry):
"""Raises an error for this section
Args:
msg: Error message to use in the raise string
msg (str): Error message to use in the raise string
Raises:
ValueError()
ValueError: always
"""
raise ValueError("Section '%s': %s" % (self._node.path, msg))
@@ -146,8 +245,8 @@ class Entry_section(Entry):
for entry in self._entries.values():
entry.AddMissingProperties(have_image_pos)
def ObtainContents(self):
return self.GetEntryContents()
def ObtainContents(self, skip_entry=None):
return self.GetEntryContents(skip_entry=skip_entry)
def GetPaddedDataForEntry(self, entry, entry_data):
"""Get the data for an entry including any padding
@@ -185,7 +284,7 @@ class Entry_section(Entry):
return data
def _BuildSectionData(self, required):
def BuildSectionData(self, required):
"""Build the contents of a section
This places all entries at the right place, dealing with padding before
@@ -193,6 +292,9 @@ class Entry_section(Entry):
pad-before and pad-after properties in the section items) since that is
handled by the parent section.
This should be overridden by subclasses which want to build their own
data structure for the section.
Args:
required: True if the data must be present, False if it is OK to
return None
@@ -204,6 +306,9 @@ class Entry_section(Entry):
for entry in self._entries.values():
entry_data = entry.GetData(required)
# This can happen when this section is referenced from a collection
# earlier in the image description. See testCollectionSection().
if not required and entry_data is None:
return None
data = self.GetPaddedDataForEntry(entry, entry_data)
@@ -253,7 +358,7 @@ class Entry_section(Entry):
This excludes any padding. If the section is compressed, the
compressed data is returned
"""
data = self._BuildSectionData(required)
data = self.BuildSectionData(required)
if data is None:
return None
self.SetContents(data)
@@ -281,7 +386,7 @@ class Entry_section(Entry):
self._SortEntries()
self._ExpandEntries()
data = self._BuildSectionData(True)
data = self.BuildSectionData(True)
self.SetContents(data)
self.CheckSize()
@@ -524,12 +629,13 @@ class Entry_section(Entry):
return entry
return None
def GetEntryContents(self):
def GetEntryContents(self, skip_entry=None):
"""Call ObtainContents() for each entry in the section
"""
def _CheckDone(entry):
if not entry.ObtainContents():
next_todo.append(entry)
if entry != skip_entry:
if not entry.ObtainContents():
next_todo.append(entry)
return entry
todo = self._entries.values()
@@ -617,7 +723,7 @@ class Entry_section(Entry):
def ListEntries(self, entries, indent):
"""List the files in the section"""
Entry.AddEntryInfo(entries, indent, self.name, 'section', self.size,
Entry.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
self.image_pos, None, self.offset, self)
for entry in self._entries.values():
entry.ListEntries(entries, indent + 1)
@@ -649,9 +755,9 @@ class Entry_section(Entry):
"""
return self._sort
def ReadData(self, decomp=True):
def ReadData(self, decomp=True, alt_format=None):
tout.Info("ReadData path='%s'" % self.GetPath())
parent_data = self.section.ReadData(True)
parent_data = self.section.ReadData(True, alt_format)
offset = self.offset - self.section._skip_at_start
data = parent_data[offset:offset + self.size]
tout.Info(
@@ -660,9 +766,9 @@ class Entry_section(Entry):
self.size, len(data)))
return data
def ReadChildData(self, child, decomp=True):
tout.Debug("ReadChildData for child '%s'" % child.GetPath())
parent_data = self.ReadData(True)
def ReadChildData(self, child, decomp=True, alt_format=None):
tout.Debug(f"ReadChildData for child '{child.GetPath()}'")
parent_data = self.ReadData(True, alt_format)
offset = child.offset - self._skip_at_start
tout.Debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
(child.GetPath(), child.offset, self._skip_at_start, offset))
@@ -674,6 +780,10 @@ class Entry_section(Entry):
tout.Info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
(child.GetPath(), len(indata), child.compress,
len(data)))
if alt_format:
new_data = child.GetAltFormat(data, alt_format)
if new_data is not None:
data = new_data
return data
def WriteChildData(self, child):
@@ -738,8 +848,14 @@ class Entry_section(Entry):
nothing.
Args:
missing: List of missing properties / entry args, each a string
entry (Entry): Entry to raise the error on
missing (list of str): List of missing properties / entry args, each
a string
"""
if not self._ignore_missing:
entry.Raise('Missing required properties/entry args: %s' %
(', '.join(missing)))
missing = ', '.join(missing)
entry.Raise(f'Missing required properties/entry args: {missing}')
def CheckAltFormats(self, alt_formats):
for entry in self._entries.values():
entry.CheckAltFormats(alt_formats)

View File

@@ -2251,7 +2251,7 @@ class TestFunctional(unittest.TestCase):
self._DoReadFile('107_cbfs_no_size.dts')
self.assertIn('entry must have a size property', str(e.exception))
def testCbfsNoCOntents(self):
def testCbfsNoContents(self):
"""Test handling of a CBFS entry which does not provide contents"""
with self.assertRaises(ValueError) as e:
self._DoReadFile('108_cbfs_no_contents.dts')
@@ -4533,7 +4533,7 @@ class TestFunctional(unittest.TestCase):
def testCollectionSection(self):
"""Test a collection where a section must be built first"""
# Sections never have their contents when GetData() is called, but when
# _BuildSectionData() is called with required=True, a section will force
# BuildSectionData() is called with required=True, a section will force
# building the contents, producing an error is anything is still
# missing.
data = self._DoReadFile('199_collection_section.dts')
@@ -4661,6 +4661,80 @@ class TestFunctional(unittest.TestCase):
str(e.exception),
"Not enough space in '.*u_boot_binman_embed_sm' for data length.*")
def testVersion(self):
    """Test we can get the binman version"""
    expected = '(unreleased)'
    self.assertEqual(expected, state.GetVersion(self._indir))

    # Passing -V should print the version to stderr and then exit
    with self.assertRaises(SystemExit), \
            test_util.capture_sys_output() as (_, stderr):
        self._DoBinman('-V')
    self.assertEqual('Binman %s\n' % expected, stderr.getvalue())

    # Try running the tool too, just to be safe
    result = self._RunBinman('-V')
    self.assertEqual('Binman %s\n' % expected, result.stderr)

    # Set up a version file to make sure that works
    expected = 'v2025.01-rc2'
    tools.WriteFile(os.path.join(self._indir, 'version'), expected,
                    binary=False)
    self.assertEqual(expected, state.GetVersion(self._indir))
def testAltFormat(self):
    """Test that alternative formats can be used to extract"""
    self._DoReadFileRealDtb('213_fdtmap_alt_format.dts')

    try:
        tmpdir, updated_fname = self._SetupImageInTmpdir()

        # 'extract -F list' should print the available alt formats
        with test_util.capture_sys_output() as (stdout, _):
            self._DoBinman('extract', '-i', updated_fname, '-F', 'list')
        self.assertEqual(
            '''Flag (-F)   Entry type      Description
fdt        fdtmap          Extract the devicetree blob from the fdtmap
''',
            stdout.getvalue())

        dtb = os.path.join(tmpdir, 'fdt.dtb')
        self._DoBinman('extract', '-i', updated_fname, '-F', 'fdt', '-f',
                       dtb, 'fdtmap')

        # Check that we can read it and it can be scanned, meaning it does
        # not have a 16-byte fdtmap header
        data = tools.ReadFile(dtb)
        dtb = fdt.Fdt.FromData(data)
        dtb.Scan()

        # Now check u-boot which has no alt_format; an unknown flag is
        # simply ignored and the normal data is extracted
        fname = os.path.join(tmpdir, 'fdt.dtb')
        self._DoBinman('extract', '-i', updated_fname, '-F', 'dummy',
                       '-f', fname, 'u-boot')
        data = tools.ReadFile(fname)
        self.assertEqual(U_BOOT_DATA, data)

    finally:
        shutil.rmtree(tmpdir)
def testExtblobList(self):
    """Test an image with an external blob list"""
    # The two files named by the blob-ext-list should be concatenated
    # in the order they are listed
    image_data = self._DoReadFile('215_blob_ext_list.dts')
    self.assertEqual(REFCODE_DATA + FSP_M_DATA, image_data)
def testExtblobListMissing(self):
    """Test an image with a missing external blob"""
    # One of the filenames in the list does not exist, so reading the
    # image must fail with a suitable error
    with self.assertRaises(ValueError) as exc:
        self._DoReadFile('216_blob_ext_list_missing.dts')
    self.assertIn("Filename 'missing-file' not found in input path",
                  str(exc.exception))
def testExtblobListMissingOk(self):
    """Test an image with a missing external blob that is allowed"""
    # With allow_missing=True the build succeeds but warns on stderr
    with test_util.capture_sys_output() as (_, stderr):
        self._DoTestFile('216_blob_ext_list_missing.dts',
                         allow_missing=True)
    err = stderr.getvalue()
    self.assertRegex(err, "Image 'main-section'.*missing.*: blob-ext")
if __name__ == "__main__":
unittest.main()

View File

@@ -63,9 +63,13 @@ class Image(section.Entry_section):
to ignore 'u-boot-bin' in this case, and build it ourselves in
binman with 'u-boot-dtb.bin' and 'u-boot.dtb'. See
Entry_u_boot_expanded and Entry_blob_phase for details.
missing_etype: Use a default entry type ('blob') if the requested one
does not exist in binman. This is useful if an image was created by
a newer version of binman but we want to list it in an older
version which does not support all the entry types.
"""
def __init__(self, name, node, copy_to_orig=True, test=False,
ignore_missing=False, use_expanded=False):
ignore_missing=False, use_expanded=False, missing_etype=False):
super().__init__(None, 'section', node, test=test)
self.copy_to_orig = copy_to_orig
self.name = 'main-section'
@@ -75,6 +79,7 @@ class Image(section.Entry_section):
self.fdtmap_data = None
self.allow_repack = False
self._ignore_missing = ignore_missing
self.missing_etype = missing_etype
self.use_expanded = use_expanded
self.test_section_timeout = False
if not test:
@@ -124,7 +129,8 @@ class Image(section.Entry_section):
# Return an Image with the associated nodes
root = dtb.GetRoot()
image = Image('image', root, copy_to_orig=False, ignore_missing=True)
image = Image('image', root, copy_to_orig=False, ignore_missing=True,
missing_etype=True)
image.image_node = fdt_util.GetString(root, 'image-node', 'image')
image.fdtmap_dtb = dtb
@@ -217,7 +223,7 @@ class Image(section.Entry_section):
entries = entry.GetEntries()
return entry
def ReadData(self, decomp=True):
def ReadData(self, decomp=True, alt_format=None):
    """Read the entire data for this image

    Args:
        decomp (bool): Accepted for API compatibility with
            Entry_section.ReadData() but ignored here — the image data is
            returned as-is
        alt_format (str): Accepted for API compatibility but ignored here

    Returns:
        The image contents held in self._data (set when the image was
        built or read back)
    """
    tout.Debug("Image '%s' ReadData(), size=%#x" %
               (self.GetPath(), len(self._data)))
    return self._data

View File

@@ -16,6 +16,8 @@ import os
from patman import tools
from patman import tout
OUR_PATH = os.path.dirname(os.path.realpath(__file__))
# Map a dtb etype to its expected filename
DTB_TYPE_FNAME = {
'u-boot-spl-dtb': 'spl/u-boot-spl.dtb',
@@ -515,3 +517,19 @@ def TimingShow():
for name, seconds in duration.items():
print('%10s: %10.1fms' % (name, seconds * 1000))
def GetVersion(path=OUR_PATH):
    """Get the version string for binman

    Args:
        path (str): Directory in which to look for a 'version' file

    Returns:
        str: Contents of the 'version' file if present, otherwise the
            placeholder '(unreleased)'
    """
    fname = os.path.join(path, 'version')
    if not os.path.exists(fname):
        return '(unreleased)'
    return tools.ReadFile(fname, binary=False)

View File

@@ -0,0 +1,15 @@
// SPDX-License-Identifier: GPL-2.0+
// Test fixture: image containing a u-boot entry plus an fdtmap
/dts-v1/;

/ {
	#address-cells = <1>;
	#size-cells = <1>;

	binman {
		u-boot {
		};
		fdtmap {
		};
	};
};

View File

@@ -0,0 +1,13 @@
// SPDX-License-Identifier: GPL-2.0+
// Test fixture: minimal image containing only a u-boot entry
/dts-v1/;

/ {
	#address-cells = <1>;
	#size-cells = <1>;

	binman {
		u-boot {
		};
	};
};

View File

@@ -0,0 +1,14 @@
// SPDX-License-Identifier: GPL-2.0+
// Test fixture: blob-ext-list naming two external blob files
/dts-v1/;

/ {
	#address-cells = <1>;
	#size-cells = <1>;

	binman {
		blob-ext-list {
			filenames = "refcode.bin", "fsp_m.bin";
		};
	};
};

View File

@@ -0,0 +1,14 @@
// SPDX-License-Identifier: GPL-2.0+
// Test fixture: blob-ext-list naming a file that does not exist,
// used to check missing-blob handling
/dts-v1/;

/ {
	#address-cells = <1>;
	#size-cells = <1>;

	binman {
		blob-ext-list {
			filenames = "refcode.bin", "missing-file";
		};
	};
};

View File

@@ -27,6 +27,18 @@ def fdt32_to_cpu(val):
"""
return struct.unpack('>I', val)[0]
def fdt64_to_cpu(val):
    """Convert a 64-bit device tree cell pair to an integer

    Args:
        val (list): Value to convert (list of 2 4-character strings
            representing the cell value, most-significant cell first)

    Return:
        int: A native-endian integer value
    """
    hi = fdt32_to_cpu(val[0])
    lo = fdt32_to_cpu(val[1])
    return (hi << 32) | lo
def fdt_cells_to_cpu(val, cells):
"""Convert one or two cells to a long integer
@@ -108,6 +120,29 @@ def GetInt(node, propname, default=None):
value = fdt32_to_cpu(prop.value)
return value
def GetInt64(node, propname, default=None):
    """Get a 64-bit integer from a property

    Args:
        node (Node): Node object to read from
        propname (str): property name to read
        default (int): Default value to use if the node/property do not exist

    Returns:
        int: value read, or default if none

    Raises:
        ValueError: Property is not of the correct size
    """
    prop = node.props.get(propname)
    if not prop:
        return default
    # A 64-bit value is stored as two 32-bit cells
    if not isinstance(prop.value, list) or len(prop.value) != 2:
        raise ValueError("Node '%s' property '%s' should be a list with 2 items for 64-bit values" %
                         (node.name, propname))
    return fdt64_to_cpu(prop.value)
def GetString(node, propname, default=None):
"""Get a string from a property
@@ -128,6 +163,27 @@ def GetString(node, propname, default=None):
"a single string" % (node.name, propname))
return value
def GetStringList(node, propname, default=None):
    """Get a string list from a property

    Args:
        node (Node): Node object to read from
        propname (str): property name to read
        default (list of str): Default value to use if the node/property do
            not exist, or None

    Returns:
        list of str: values read, or default if none
    """
    prop = node.props.get(propname)
    if not prop:
        return default
    if isinstance(prop.value, list):
        return prop.value
    # A single string is not stored as a list; wrap it in one
    return [GetString(node, propname)]
def GetBool(node, propname, default=False):
"""Get an boolean from a property
@@ -167,6 +223,26 @@ def GetByte(node, propname, default=None):
(node.name, propname, len(value), 1))
return ord(value[0])
def GetBytes(node, propname, size, default=None):
    """Get a fixed-size byte value from a property

    Args:
        node (Node): Node object to read from
        propname (str): property name to read
        size (int): Number of bytes to expect
        default (bytes): Default value to use if the node/property do not
            exist, or None

    Returns:
        bytes: Bytes value read, or default if none

    Raises:
        ValueError: Property exists but is not exactly `size` bytes long
    """
    prop = node.props.get(propname)
    if not prop:
        return default
    data = prop.bytes
    if len(data) != size:
        raise ValueError("Node '%s' property '%s' has length %d, expecting %d" %
                         (node.name, propname, len(data), size))
    return data
def GetPhandleList(node, propname):
"""Get a list of phandles from a property

View File

@@ -16,6 +16,7 @@
boolval;
maybe-empty-int = <>;
intval = <1>;
int64val = /bits/ 64 <0x123456789abcdef0>;
intarray = <2 3 4>;
byteval = [05];
bytearray = [06];

View File

@@ -296,6 +296,7 @@ struct dtd_sandbox_spl_test {
\tbool\t\tboolval;
\tunsigned char\tbytearray[3];
\tunsigned char\tbyteval;
\tfdt32_t\t\tint64val[2];
\tfdt32_t\t\tintarray[3];
\tfdt32_t\t\tintval;
\tunsigned char\tlongbytearray[9];
@@ -355,6 +356,7 @@ static struct dtd_sandbox_spl_test dtv_spl_test = {
\t.boolval\t\t= true,
\t.bytearray\t\t= {0x6, 0x0, 0x0},
\t.byteval\t\t= 0x5,
\t.int64val\t\t= {0x12345678, 0x9abcdef0},
\t.intarray\t\t= {0x2, 0x3, 0x4},
\t.intval\t\t\t= 0x1,
\t.longbytearray\t\t= {0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10,

View File

@@ -16,9 +16,15 @@ import unittest
our_path = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.join(our_path, '..'))
# Bring in the libfdt module
sys.path.insert(2, 'scripts/dtc/pylibfdt')
sys.path.insert(2, os.path.join(our_path, '../../scripts/dtc/pylibfdt'))
sys.path.insert(2, os.path.join(our_path,
'../../build-sandbox_spl/scripts/dtc/pylibfdt'))
from dtoc import fdt
from dtoc import fdt_util
from dtoc.fdt_util import fdt32_to_cpu
from dtoc.fdt_util import fdt32_to_cpu, fdt64_to_cpu
from fdt import Type, BytesToValue
import libfdt
from patman import command
@@ -122,7 +128,7 @@ class TestFdt(unittest.TestCase):
node = self.dtb.GetNode('/spl-test')
props = self.dtb.GetProps(node)
self.assertEqual(['boolval', 'bytearray', 'byteval', 'compatible',
'intarray', 'intval', 'longbytearray',
'int64val', 'intarray', 'intval', 'longbytearray',
'maybe-empty-int', 'notstring', 'stringarray',
'stringval', 'u-boot,dm-pre-reloc'],
sorted(props.keys()))
@@ -329,6 +335,10 @@ class TestProp(unittest.TestCase):
self.assertEqual(Type.INT, prop.type)
self.assertEqual(1, fdt32_to_cpu(prop.value))
prop = self._ConvertProp('int64val')
self.assertEqual(Type.INT, prop.type)
self.assertEqual(0x123456789abcdef0, fdt64_to_cpu(prop.value))
prop = self._ConvertProp('intarray')
self.assertEqual(Type.INT, prop.type)
val = [fdt32_to_cpu(val) for val in prop.value]
@@ -580,10 +590,21 @@ class TestFdtUtil(unittest.TestCase):
self.assertEqual(3, fdt_util.GetInt(self.node, 'missing', 3))
with self.assertRaises(ValueError) as e:
self.assertEqual(3, fdt_util.GetInt(self.node, 'intarray'))
fdt_util.GetInt(self.node, 'intarray')
self.assertIn("property 'intarray' has list value: expecting a single "
'integer', str(e.exception))
def testGetInt64(self):
    """Test GetInt64() reads 64-bit values, defaults and bad properties"""
    self.assertEqual(0x123456789abcdef0,
                     fdt_util.GetInt64(self.node, 'int64val'))
    self.assertEqual(3, fdt_util.GetInt64(self.node, 'missing', 3))

    # A property with the wrong number of cells must be rejected
    with self.assertRaises(ValueError) as e:
        fdt_util.GetInt64(self.node, 'intarray')
    self.assertIn(
        "property 'intarray' should be a list with 2 items for 64-bit values",
        str(e.exception))
def testGetString(self):
self.assertEqual('message', fdt_util.GetString(self.node, 'stringval'))
self.assertEqual('test', fdt_util.GetString(self.node, 'missing',
@@ -594,6 +615,15 @@ class TestFdtUtil(unittest.TestCase):
self.assertIn("property 'stringarray' has list value: expecting a "
'single string', str(e.exception))
def testGetStringList(self):
    """Test GetStringList() with single-string, multi-string and defaults"""
    # A single string comes back wrapped in a one-element list
    self.assertEqual(['message'],
                     fdt_util.GetStringList(self.node, 'stringval'))
    self.assertEqual(
        ['multi-word', 'message'],
        fdt_util.GetStringList(self.node, 'stringarray'))
    self.assertEqual(['test'],
                     fdt_util.GetStringList(self.node, 'missing', ['test']))
def testGetBool(self):
self.assertEqual(True, fdt_util.GetBool(self.node, 'boolval'))
self.assertEqual(False, fdt_util.GetBool(self.node, 'missing'))
@@ -614,6 +644,23 @@ class TestFdtUtil(unittest.TestCase):
self.assertIn("property 'intval' has length 4, expecting 1",
str(e.exception))
def testGetBytes(self):
    """Test GetBytes() reads, defaults and size-checks byte properties"""
    self.assertEqual(bytes([5]), fdt_util.GetBytes(self.node, 'byteval', 1))
    self.assertEqual(None, fdt_util.GetBytes(self.node, 'missing', 3))
    self.assertEqual(
        bytes([3]), fdt_util.GetBytes(self.node, 'missing', 3, bytes([3])))

    # A size mismatch must be rejected with a clear error
    with self.assertRaises(ValueError) as e:
        fdt_util.GetBytes(self.node, 'longbytearray', 7)
    self.assertIn(
        "Node 'spl-test' property 'longbytearray' has length 9, expecting 7",
        str(e.exception))

    # An integer property can also be read as its raw big-endian bytes
    self.assertEqual(
        bytes([0, 0, 0, 1]), fdt_util.GetBytes(self.node, 'intval', 4))
    self.assertEqual(
        bytes([3]), fdt_util.GetBytes(self.node, 'missing', 3, bytes([3])))
def testGetPhandleList(self):
dtb = fdt.FdtScan(find_dtb_file('dtoc_test_phandle.dts'))
node = dtb.GetNode('/phandle-source2')