test/py: run all "ut" subtests

Invoke each "ut"-based unit test as a separate pytest.

Now that the DM unit test runs under test/py, remove the manual shell
script that invokes it.

Signed-off-by: Stephen Warren <swarren@nvidia.com>
Acked-by: Simon Glass <sjg@chromium.org>
Tested-by: Simon Glass <sjg@chromium.org> # v2, on sandbox
commit 1cd85f571d (parent 7ed4848bb8), branch master
Author: Stephen Warren <swarren@nvidia.com>, committed by Tom Rini

Changed files:
  1. test/dm/test-dm.sh        (16 lines)
  2. test/py/conftest.py       (103 lines)
  3. test/py/tests/test_ut.py  (29 lines)
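
For context, a rough sketch of how a single "ut" subtest is turned into its own pytest by the changes below; the subtest name 'dm autobind' is only a hypothetical example, since the real list is read from u-boot.sym at collection time:

subtest = 'dm autobind'                      # parameter value passed to test_ut
test_id = 'ut_' + subtest.replace(' ', '_')  # pytest ID, e.g. 'ut_dm_autobind'
command = 'ut ' + subtest                    # command run on the U-Boot console
print(test_id, '->', command)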

test/dm/test-dm.sh (deleted):
@@ -1,16 +0,0 @@
#!/bin/sh
die() {
    echo $1
    exit 1
}
NUM_CPUS=$(cat /proc/cpuinfo |grep -c processor)
make O=sandbox sandbox_config || die "Cannot configure U-Boot"
make O=sandbox -s -j${NUM_CPUS} || die "Cannot build U-Boot"
dd if=/dev/zero of=spi.bin bs=1M count=2
echo -n "this is a test" > testflash.bin
dd if=/dev/zero bs=1M count=4 >>testflash.bin
./sandbox/u-boot -d ./sandbox/arch/sandbox/dts/test.dtb -c "ut dm"
rm spi.bin
rm testflash.bin

test/py/conftest.py:
@@ -21,6 +21,7 @@ import pexpect
import pytest
from _pytest.runner import runtestprotocol
import ConfigParser
import re
import StringIO
import sys
@@ -199,8 +200,42 @@ def pytest_configure(config):
        import u_boot_console_exec_attach
        console = u_boot_console_exec_attach.ConsoleExecAttach(log, ubconfig)

def pytest_generate_tests(metafunc):
    """pytest hook: parameterize test functions based on custom rules.

re_ut_test_list = re.compile(r'_u_boot_list_2_(dm|env)_test_2_\1_test_(.*)\s*$')

def generate_ut_subtest(metafunc, fixture_name):
    """Provide parametrization for a ut_subtest fixture.

    Determines the set of unit tests built into a U-Boot binary by parsing the
    list of symbols generated by the build process. Provides this information
    to test functions by parameterizing their ut_subtest fixture parameter.

    Args:
        metafunc: The pytest test function.
        fixture_name: The fixture name to test.

    Returns:
        Nothing.
    """

    fn = console.config.build_dir + '/u-boot.sym'
    try:
        with open(fn, 'rt') as f:
            lines = f.readlines()
    except:
        lines = []
    lines.sort()

    vals = []
    for l in lines:
        m = re_ut_test_list.search(l)
        if not m:
            continue
        vals.append(m.group(1) + ' ' + m.group(2))

    ids = ['ut_' + s.replace(' ', '_') for s in vals]
    metafunc.parametrize(fixture_name, vals, ids=ids)
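
To illustrate what the regular expression above extracts, here is a small standalone sketch run against a hypothetical u-boot.sym line; the symbol name is an example only, as the real names depend on which unit tests are built in:

import re

re_ut_test_list = re.compile(r'_u_boot_list_2_(dm|env)_test_2_\1_test_(.*)\s*$')

# Hypothetical symbol line; group(1) is the suite, group(2) the subtest name.
line = '00000000002f3a40 D _u_boot_list_2_dm_test_2_dm_test_autobind'
m = re_ut_test_list.search(line)
if m:
    print(m.group(1) + ' ' + m.group(2))            # parameter value 'dm autobind'
    print('ut_' + m.group(1) + '_' + m.group(2))    # pytest ID 'ut_dm_autobind'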

def generate_config(metafunc, fixture_name):
    """Provide parametrization for {env,brd}__ fixtures.

    If a test function takes parameter(s) (fixture names) of the form brd__xxx
    or env__xxx, the brd and env configuration dictionaries are consulted to
@@ -209,6 +244,7 @@ def pytest_generate_tests(metafunc):
    Args:
        metafunc: The pytest test function.
        fixture_name: The fixture name to test.

    Returns:
        Nothing.
@@ -218,30 +254,49 @@ def pytest_generate_tests(metafunc):
        'brd': console.config.brd,
        'env': console.config.env,
    }

    parts = fixture_name.split('__')
    if len(parts) < 2:
        return
    if parts[0] not in subconfigs:
        return
    subconfig = subconfigs[parts[0]]
    vals = []
    val = subconfig.get(fixture_name, [])
    # If that exact name is a key in the data source:
    if val:
        # ... use the dict value as a single parameter value.
        vals = (val, )
    else:
        # ... otherwise, see if there's a key that contains a list of
        # values to use instead.
        vals = subconfig.get(fixture_name + 's', [])

    def fixture_id(index, val):
        try:
            return val['fixture_id']
        except:
            return fixture_name + str(index)
    ids = [fixture_id(index, val) for (index, val) in enumerate(vals)]

    metafunc.parametrize(fixture_name, vals, ids=ids)
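
As a sketch of the data this function consumes, a board environment module (a u_boot_boardenv_* file) might contain entries like the following; the key names and values here are purely hypothetical:

# An exact-match key supplies a single parameter value for the hypothetical
# fixture 'env__hypothetical_flash' ...
env__hypothetical_flash = {'fixture_id': 'flash0', 'size_mb': 4}

# ... while a key with a trailing 's' supplies a list, producing one test run
# per entry for the hypothetical fixture 'env__hypothetical_port'.
env__hypothetical_ports = [
    {'fixture_id': 'port0', 'number': 0},
    {'fixture_id': 'port1', 'number': 1},
]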

def pytest_generate_tests(metafunc):
    """pytest hook: parameterize test functions based on custom rules.

    Check each test function parameter (fixture name) to see if it is one of
    our custom names, and if so, provide the correct parametrization for that
    parameter.

    Args:
        metafunc: The pytest test function.

    Returns:
        Nothing.
    """

    for fn in metafunc.fixturenames:
        parts = fn.split('__')
        if len(parts) < 2:
        if fn == 'ut_subtest':
            generate_ut_subtest(metafunc, fn)
            continue
        if parts[0] not in subconfigs:
            continue
        subconfig = subconfigs[parts[0]]
        vals = []
        val = subconfig.get(fn, [])
        # If that exact name is a key in the data source:
        if val:
            # ... use the dict value as a single parameter value.
            vals = (val, )
        else:
            # ... otherwise, see if there's a key that contains a list of
            # values to use instead.
            vals = subconfig.get(fn + 's', [])
        def fixture_id(index, val):
            try:
                return val["fixture_id"]
            except:
                return fn + str(index)
        ids = [fixture_id(index, val) for (index, val) in enumerate(vals)]
        metafunc.parametrize(fn, vals, ids=ids)
        generate_config(metafunc, fn)
@pytest.fixture(scope='function')
def u_boot_console(request):

test/py/tests/test_ut.py (new file):
@@ -0,0 +1,29 @@
# Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
#
# SPDX-License-Identifier: GPL-2.0
import os.path
import pytest

@pytest.mark.buildconfigspec('ut_dm')
def test_ut_dm_init(u_boot_console):
    """Initialize data for ut dm tests."""

    fn = u_boot_console.config.source_dir + '/testflash.bin'
    if not os.path.exists(fn):
        data = 'this is a test'
        data += '\x00' * ((4 * 1024 * 1024) - len(data))
        with open(fn, 'wb') as fh:
            fh.write(data)

    fn = u_boot_console.config.source_dir + '/spi.bin'
    if not os.path.exists(fn):
        data = '\x00' * (2 * 1024 * 1024)
        with open(fn, 'wb') as fh:
            fh.write(data)

def test_ut(u_boot_console, ut_subtest):
    """Execute a "ut" subtest."""

    output = u_boot_console.run_command('ut ' + ut_subtest)
    assert output.endswith('Failures: 0')
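
A sketch of the kind of console output the final assertion expects; the exact text comes from U-Boot's "ut" command, and the lines shown here are only illustrative:

# Hypothetical 'ut dm autobind' output; only the trailing summary line is
# checked by test_ut().
output = 'Test: dm_test_autobind\nFailures: 0'
assert output.endswith('Failures: 0')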