Commit dd8c86c6 authored by Linus Torvalds

Merge tag 'ktest-v5.13' of git://git.kernel.org/pub/scm/linux/kernel/git/rostedt/linux-ktest

Pull ktest updates from Steven Rostedt:

 - Added a KTEST section in the MAINTAINERS file

 - Included John Hawley as a co-maintainer

 - Added an example config that works with VMware Workstation guests

 - Cleanups to the code

* tag 'ktest-v5.13' of git://git.kernel.org/pub/scm/linux/kernel/git/rostedt/linux-ktest:
  ktest: Add KTEST section to MAINTAINERS file
  ktest: Re-arrange the code blocks for better discoverability
  ktest: Further consistency cleanups
  ktest: Fixing indentation to match expected pattern
  ktest: Adding editor hints to improve consistency
  ktest: Add example config for using VMware VMs
  ktest: Minor cleanup with uninitialized variable $build_options
parents d29c9bb0 c7ceee69
......@@ -10170,6 +10170,12 @@ S: Maintained
F: Documentation/devicetree/bindings/leds/backlight/kinetic,ktd253.yaml
F: drivers/video/backlight/ktd253-backlight.c
KTEST
M: Steven Rostedt <rostedt@goodmis.org>
M: John Hawley <warthog9@eaglescrag.net>
S: Maintained
F: tools/testing/ktest
L3MDEV
M: David Ahern <dsahern@kernel.org>
L: netdev@vger.kernel.org
......
#
# This config is an example usage of ktest.pl with a vmware guest
#
# VMware Setup:
# -------------
# - Edit the Virtual Machine ("Edit virtual machine settings")
# - Add a Serial Port
# - You almost certainly want it set to "Connect at power on"
# - Select "Use socket (named pipe)"
# - Select a name that you'll recognize, like 'ktestserialpipe'
# - From: Server
# - To: A Virtual Machine
# - Save
# - Make sure you note the name, it will be in the base directory of the
# virtual machine (where the "disks" are stored). The default
# is /var/lib/vmware/<virtual machine name>/<the name you entered above>
#
# - Make note of the path to the VM
# </End VMware setup>
#
# The guest is called 'Guest' and this would be something that
# could be run on the host to test a virtual machine target.
MACHINE = Guest
# Name of the serial pipe you set in the VMware settings
VMWARE_SERIAL_NAME = <the name you entered above>
# Define a variable for the name of the VM
# Note: this needs to be the name of the .vmx file and, usually, the
# name of the directory that it's in. If the directory and name
# differ, change VMWARE_VM_DIR accordingly.
# Please omit the .vmx extension
VMWARE_VM_NAME = <virtual machine name>
# VM dir name. This is usually the same as the virtual machine's name,
# but that is not always the case. Change it if they differ
VMWARE_VM_DIR = ${VMWARE_VM_NAME}
# Base directory that the Virtual machine is contained in
# /var/lib/vmware is the default on Linux
VMWARE_VM_BASE_DIR = /var/lib/vmware/${VMWARE_VM_DIR}
# Use ncat to read the Unix pipe. Anything that can read the Unix pipe
# and output its contents to stdout will work
CONSOLE = /usr/bin/ncat -U ${VMWARE_VM_BASE_DIR}/${VMWARE_SERIAL_NAME}
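#
# (Illustrative alternative, not part of the original example: any program
# that can read the Unix socket works here. If socat is installed, an
# equivalent CONSOLE setting would be:
# CONSOLE = /usr/bin/socat - UNIX-CONNECT:${VMWARE_VM_BASE_DIR}/${VMWARE_SERIAL_NAME}
# Keep only one CONSOLE definition active at a time.)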
# Define what version of Workstation you are using
# This is used by vmrun to pick the appropriate pieces to
# test this. In all likelihood you want 'ws' or 'player'
# Valid options:
# ws - Workstation (Windows or Linux host)
# fusion - Fusion (Mac host)
# player - Using VMware Player (Windows or Linux host)
# Note: vmrun has to run directly on the host machine
VMWARE_HOST_TYPE = ws
# VMware provides `vmrun` to allow you to do certain things to the virtual machine
# This should hard reset the VM and force a boot
VMWARE_POWER_CYCLE = /usr/bin/vmrun -T ${VMWARE_HOST_TYPE} reset ${VMWARE_VM_BASE_DIR}/${VMWARE_VM_NAME}.vmx nogui
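#
# (Illustrative, not part of the original example: the vmrun setup can be
# sanity-checked by hand on the host before wiring it into ktest, e.g.
#   /usr/bin/vmrun -T ws list
# which lists the currently running VMs and, once the guest is powered on,
# should include the path to the .vmx file used above.)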
#*************************************#
# This part is the same as test.conf #
#*************************************#
# The include files will set up the type of test to run. Just set TEST to
# which test you want to run.
#
# TESTS = patchcheck, randconfig, boot, test, config-bisect, bisect, min-config
#
# See the include/*.conf files that define these tests
#
TEST := patchcheck
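# For example (illustrative), to run the boot test from the include files
# instead, this would simply be:
# TEST := boot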
# Some tests may have more than one test to run. Define MULTI := 1 to run
# the extra tests.
MULTI := 0
# In case you want to differentiate which type of system you are testing
BITS := 64
# REBOOT = none, error, fail, empty
# See include/defaults.conf
REBOOT := empty
# The defaults file will set up various settings that can be used by all
# machine configs.
INCLUDE include/defaults.conf
#*************************************#
# Now we are different from test.conf #
#*************************************#
# The example here assumes that Guest is running a Fedora release
# that uses dracut for its initramfs. The POST_INSTALL will be executed
# after the install of the kernel and modules is complete.
#
POST_INSTALL = ${SSH} /sbin/dracut -f /boot/initramfs-test.img $KERNEL_VERSION
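# (Assumption: ${SSH} is defined by the shared include files, and
# $KERNEL_VERSION is filled in by ktest.pl with the version of the kernel
# that was just installed.)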
# Guests sometimes get stuck on reboot. We wait 3 seconds after running
# the reboot command and then do a full power-cycle of the guest.
# This forces the guest to restart.
#
POWERCYCLE_AFTER_REBOOT = 3
# We do the same after the halt command, but this time we wait 20 seconds.
POWEROFF_AFTER_HALT = 20
# As the defaults.conf file already defines a POWER_CYCLE option, and
# options can not be defined more than once in the same section
# (all DEFAULTS sections are considered the same), we use
# DEFAULTS OVERRIDE to tell ktest.pl to ignore the previously defined
# value for any option that is set in the OVERRIDE section.
#
DEFAULTS OVERRIDE
# Instead of using the default POWER_CYCLE option defined in
# defaults.conf, we use vmrun to hard reset the guest
# (see VMWARE_POWER_CYCLE above).
# Crude, but effective.
#
POWER_CYCLE = ${VMWARE_POWER_CYCLE}
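# (Illustrative only: repeating the definition in a plain DEFAULTS section,
# as sketched below, would be rejected because POWER_CYCLE is already defined
# by include/defaults.conf; only an OVERRIDE section may replace it.)
#
# DEFAULTS
# POWER_CYCLE = ${VMWARE_POWER_CYCLE}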
DEFAULTS
# The following files each handle a different test case.
# Having them included allows you to set up more than one machine and share
# the same tests.
INCLUDE include/patchcheck.conf
INCLUDE include/tests.conf
INCLUDE include/bisect.conf
INCLUDE include/min-config.conf
......@@ -24,7 +24,7 @@ my %evals;
#default opts
my %default = (
"MAILER" => "sendmail", # default mailer
"MAILER" => "sendmail", # default mailer
"EMAIL_ON_ERROR" => 1,
"EMAIL_WHEN_FINISHED" => 1,
"EMAIL_WHEN_CANCELED" => 0,
......@@ -36,15 +36,15 @@ my %default = (
"CLOSE_CONSOLE_SIGNAL" => "INT",
"TIMEOUT" => 120,
"TMP_DIR" => "/tmp/ktest/\${MACHINE}",
"SLEEP_TIME" => 60, # sleep time between tests
"SLEEP_TIME" => 60, # sleep time between tests
"BUILD_NOCLEAN" => 0,
"REBOOT_ON_ERROR" => 0,
"POWEROFF_ON_ERROR" => 0,
"REBOOT_ON_SUCCESS" => 1,
"POWEROFF_ON_SUCCESS" => 0,
"BUILD_OPTIONS" => "",
"BISECT_SLEEP_TIME" => 60, # sleep time between bisects
"PATCHCHECK_SLEEP_TIME" => 60, # sleep time between patch checks
"BISECT_SLEEP_TIME" => 60, # sleep time between bisects
"PATCHCHECK_SLEEP_TIME" => 60, # sleep time between patch checks
"CLEAR_LOG" => 0,
"BISECT_MANUAL" => 0,
"BISECT_SKIP" => 1,
......@@ -512,6 +512,69 @@ $config_help{"REBOOT_SCRIPT"} = << "EOF"
EOF
;
# used with process_expression()
my $d = 0;
# defined before get_test_name()
my $in_die = 0;
# defined before process_warning_line()
my $check_build_re = ".*:.*(warning|error|Error):.*";
my $utf8_quote = "\\x{e2}\\x{80}(\\x{98}|\\x{99})";
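# (Illustrative note: $check_build_re matches compiler diagnostics of the
# usual "file:line:col: level: text" shape, e.g. a made-up line such as
#   drivers/foo/bar.c:42:10: warning: unused variable 'x'
# and $utf8_quote matches the UTF-8 left/right single quotes some compilers
# emit, so they can be normalized to plain ASCII quotes before warnings are
# compared.)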
# defined before child_finished()
my $child_done;
# config_ignore holds the configs that were set (or unset) for
# a good config and we will ignore these configs for the rest
# of a config bisect. These configs stay as they were.
my %config_ignore;
# config_set holds what all configs were set as.
my %config_set;
# config_off holds the set of configs that the bad config had disabled.
# We need to record them and set them in the .config when running
# olddefconfig, because olddefconfig keeps the defaults.
my %config_off;
# config_off_tmp holds a set of configs to turn off for now
my @config_off_tmp;
# config_list is the set of configs that are being tested
my %config_list;
my %null_config;
my %dependency;
# found above run_config_bisect()
my $pass = 1;
# found above add_dep()
my %depends;
my %depcount;
my $iflevel = 0;
my @ifdeps;
# prevent recursion
my %read_kconfigs;
# found above test_this_config()
my %min_configs;
my %keep_configs;
my %save_configs;
my %processed_configs;
my %nochange_config;
#
# These are first defined here, main function later on
#
sub run_command;
sub start_monitor;
sub end_monitor;
sub wait_for_monitor;
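# (Illustrative note: these forward declarations let subroutines defined
# earlier in the file call run_command, start_monitor, end_monitor and
# wait_for_monitor by bare name before their definitions appear further down.)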
sub _logit {
if (defined($opt{"LOG_FILE"})) {
print LOG @_;
......@@ -537,7 +600,7 @@ sub read_prompt {
my $ans;
for (;;) {
if ($cancel) {
print "$prompt [y/n/C] ";
} else {
print "$prompt [Y/n] ";
......@@ -760,7 +823,7 @@ sub process_variables {
# remove the space added in the beginning
$retval =~ s/ //;
return "$retval"
return "$retval";
}
sub set_value {
......@@ -863,7 +926,6 @@ sub value_defined {
defined($opt{$2});
}
sub process_expression {
my ($name, $val) = @_;
......@@ -978,7 +1040,6 @@ sub __read_config {
$override = 0;
if ($type eq "TEST_START") {
if ($num_tests_set) {
die "$name: $.: Can not specify both NUM_TESTS and TEST_START\n";
}
......@@ -1048,7 +1109,6 @@ sub __read_config {
$test_num = $old_test_num;
$repeat = $old_repeat;
}
} elsif (/^\s*ELSE\b(.*)$/) {
if (!$if) {
die "$name: $.: ELSE found with out matching IF section\n$_";
......@@ -1095,7 +1155,7 @@ sub __read_config {
}
}
}
if ( ! -r $file ) {
die "$name: $.: Can't read file $file\n$_";
}
......@@ -1186,13 +1246,13 @@ sub __read_config {
}
sub get_test_case {
print "What test case would you like to run?\n";
print " (build, install or boot)\n";
print " Other tests are available but require editing ktest.conf\n";
print " (see tools/testing/ktest/sample.conf)\n";
my $ans = <STDIN>;
chomp $ans;
$default{"TEST_TYPE"} = $ans;
print "What test case would you like to run?\n";
print " (build, install or boot)\n";
print " Other tests are available but require editing ktest.conf\n";
print " (see tools/testing/ktest/sample.conf)\n";
my $ans = <STDIN>;
chomp $ans;
$default{"TEST_TYPE"} = $ans;
}
sub read_config {
......@@ -1368,11 +1428,6 @@ sub eval_option {
return $option;
}
sub reboot {
my ($time) = @_;
my $powercycle = 0;
......@@ -1457,8 +1512,6 @@ sub do_not_reboot {
($test_type eq "config_bisect" && $opt{"CONFIG_BISECT_TYPE[$i]"} eq "build");
}
sub get_test_name() {
my $name;
......@@ -1471,7 +1524,6 @@ sub get_test_name() {
}
sub dodie {
# avoid recursion
return if ($in_die);
$in_die = 1;
......@@ -1481,10 +1533,8 @@ sub dodie {
doprint "CRITICAL FAILURE... [TEST $i] ", @_, "\n";
if ($reboot_on_error && !do_not_reboot) {
doprint "REBOOTING\n";
reboot_to_good;
} elsif ($poweroff_on_error && defined($power_off)) {
doprint "POWERING OFF\n";
`$power_off`;
......@@ -1519,13 +1569,14 @@ sub dodie {
close O;
close L;
}
send_email("KTEST: critical failure for test $i [$name]",
"Your test started at $script_start_time has failed with:\n@_\n", $log_file);
send_email("KTEST: critical failure for test $i [$name]",
"Your test started at $script_start_time has failed with:\n@_\n", $log_file);
}
if ($monitor_cnt) {
# restore terminal settings
system("stty $stty_orig");
}
if (defined($post_test)) {
......@@ -1709,81 +1760,81 @@ sub wait_for_monitor {
}
sub save_logs {
my ($result, $basedir) = @_;
my @t = localtime;
my $date = sprintf "%04d%02d%02d%02d%02d%02d",
1900+$t[5],$t[4],$t[3],$t[2],$t[1],$t[0];
my $type = $build_type;
if ($type =~ /useconfig/) {
$type = "useconfig";
}
my $dir = "$machine-$test_type-$type-$result-$date";
$dir = "$basedir/$dir";
if (!-d $dir) {
mkpath($dir) or
dodie "can't create $dir";
}
my %files = (
"config" => $output_config,
"buildlog" => $buildlog,
"dmesg" => $dmesg,
"testlog" => $testlog,
);
while (my ($name, $source) = each(%files)) {
if (-f "$source") {
cp "$source", "$dir/$name" or
dodie "failed to copy $source";
}
}
doprint "*** Saved info to $dir ***\n";
}
sub fail {
if ($die_on_failure) {
dodie @_;
}
doprint "FAILED\n";
my $i = $iteration;
# no need to reboot for just building.
if (!do_not_reboot) {
doprint "REBOOTING\n";
reboot_to_good $sleep_time;
}
my $name = "";
if (defined($test_name)) {
$name = " ($test_name)";
}
print_times;
doprint "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n";
doprint "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n";
doprint "KTEST RESULT: TEST $i$name Failed: ", @_, "\n";
doprint "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n";
doprint "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n";
if (defined($store_failures)) {
save_logs "fail", $store_failures;
}
if (defined($post_test)) {
run_command $post_test;
}
return 1;
}
sub run_command {
......@@ -1915,8 +1966,8 @@ sub _get_grub_index {
my ($command, $target, $skip) = @_;
return if (defined($grub_number) && defined($last_grub_menu) &&
$last_grub_menu eq $grub_menu && defined($last_machine) &&
$last_machine eq $machine);
doprint "Find $reboot_type menu ... ";
$grub_number = -1;
......@@ -1924,8 +1975,8 @@ sub _get_grub_index {
my $ssh_grub = $ssh_exec;
$ssh_grub =~ s,\$SSH_COMMAND,$command,g;
open(IN, "$ssh_grub |")
or dodie "unable to execute $command";
open(IN, "$ssh_grub |") or
dodie "unable to execute $command";
my $found = 0;
......@@ -1969,9 +2020,9 @@ sub get_grub_index {
$target = '^menuentry.*' . $grub_menu_qt;
$skip = '^menuentry\s|^submenu\s';
} elsif ($reboot_type eq "grub2bls") {
$command = $grub_bls_get;
$target = '^title=.*' . $grub_menu_qt;
$skip = '^title=';
} else {
return;
}
......@@ -1979,8 +2030,7 @@ sub get_grub_index {
_get_grub_index($command, $target, $skip);
}
sub wait_for_input
{
sub wait_for_input {
my ($fp, $time) = @_;
my $start_time;
my $rin;
......@@ -2096,7 +2146,6 @@ sub monitor {
my $version_found = 0;
while (!$done) {
if ($bug && defined($stop_after_failure) &&
$stop_after_failure >= 0) {
my $time = $stop_after_failure - (time - $failure_start);
......@@ -2349,9 +2398,6 @@ sub start_monitor_and_install {
return monitor;
}
sub process_warning_line {
my ($line) = @_;
......@@ -2394,7 +2440,7 @@ sub check_buildlog {
while (<IN>) {
if (/$check_build_re/) {
my $warning = process_warning_line $_;
$warnings_list{$warning} = 1;
}
}
......@@ -2571,7 +2617,6 @@ sub build {
run_command "mv $outputdir/config_temp $output_config" or
dodie "moving config_temp";
}
} elsif (!$noclean) {
unlink "$output_config";
run_command "$make mrproper" or
......@@ -2594,6 +2639,9 @@ sub build {
# Run old config regardless, to enforce min configurations
make_oldconfig;
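# (Illustrative note: the guard below keeps an unset BUILD_OPTIONS from
# triggering an "uninitialized value" warning when $build_options is
# interpolated into the make command that follows.)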
if (not defined($build_options)){
$build_options = "";
}
my $build_ret = run_command "$make $build_options", $buildlog;
if (defined($post_build)) {
......@@ -2649,14 +2697,15 @@ sub success {
print_times;
doprint "\n\n*******************************************\n";
doprint "*******************************************\n";
doprint "KTEST RESULT: TEST $i$name SUCCESS!!!! **\n";
doprint "*******************************************\n";
doprint "*******************************************\n";
doprint "\n\n";
doprint "*******************************************\n";
doprint "*******************************************\n";
doprint "KTEST RESULT: TEST $i$name SUCCESS!!!! **\n";
doprint "*******************************************\n";
doprint "*******************************************\n";
if (defined($store_successes)) {
save_logs "success", $store_successes;
save_logs "success", $store_successes;
}
if ($i != $opt{"NUM_TESTS"} && !do_not_reboot) {
......@@ -2698,8 +2747,6 @@ sub child_run_test {
exit $run_command_status;
}
sub child_finished {
$child_done = 1;
}
......@@ -3031,7 +3078,6 @@ sub bisect {
}
if ($do_check) {
# get current HEAD
my $head = get_sha1("HEAD");
......@@ -3071,13 +3117,11 @@ sub bisect {
run_command "git bisect replay $replay" or
dodie "failed to run replay";
} else {
run_command "git bisect good $good" or
dodie "could not set bisect good to $good";
run_git_bisect "git bisect bad $bad" or
dodie "could not set bisect bad to $bad";
}
if (defined($start)) {
......@@ -3103,35 +3147,13 @@ sub bisect {
success $i;
}
sub assign_configs {
my ($hash, $config) = @_;
doprint "Reading configs from $config\n";
open (IN, $config)
or dodie "Failed to read $config";
open (IN, $config) or
dodie "Failed to read $config";
while (<IN>) {
chomp;
......@@ -3219,8 +3241,6 @@ sub config_bisect_end {
doprint "***************************************\n\n";
}
sub run_config_bisect {
my ($good, $bad, $last_result) = @_;
my $reset = "";
......@@ -3243,13 +3263,13 @@ sub run_config_bisect {
$ret = run_config_bisect_test $config_bisect_type;
if ($ret) {
doprint "NEW GOOD CONFIG ($pass)\n";
doprint "NEW GOOD CONFIG ($pass)\n";
system("cp $output_config $tmpdir/good_config.tmp.$pass");
$pass++;
# Return 3 for good config
return 3;
} else {
doprint "NEW BAD CONFIG ($pass)\n";
doprint "NEW BAD CONFIG ($pass)\n";
system("cp $output_config $tmpdir/bad_config.tmp.$pass");
$pass++;
# Return 4 for bad config
......@@ -3284,10 +3304,11 @@ sub config_bisect {
if (!defined($config_bisect_exec)) {
# First check the location that ktest.pl ran
my @locations = ( "$pwd/config-bisect.pl",
"$dirname/config-bisect.pl",
"$builddir/tools/testing/ktest/config-bisect.pl",
undef );
my @locations = (
"$pwd/config-bisect.pl",
"$dirname/config-bisect.pl",
"$builddir/tools/testing/ktest/config-bisect.pl",
undef );
foreach my $loc (@locations) {
doprint "loc = $loc\n";
$config_bisect_exec = $loc;
......@@ -3368,7 +3389,7 @@ sub config_bisect {
} while ($ret == 3 || $ret == 4);
if ($ret == 2) {
config_bisect_end "$good_config.tmp", "$bad_config.tmp";
config_bisect_end "$good_config.tmp", "$bad_config.tmp";
}
return $ret if ($ret < 0);
......@@ -3511,14 +3532,6 @@ sub patchcheck {
return 1;
}
sub add_dep {
# $config depends on $dep
my ($config, $dep) = @_;
......@@ -3548,7 +3561,6 @@ sub read_kconfig {
my $cont = 0;
my $line;
if (! -f $kconfig) {
doprint "file $kconfig does not exist, skipping\n";
return;
......@@ -3630,8 +3642,8 @@ sub read_kconfig {
sub read_depends {
# find out which arch this is by the kconfig file
open (IN, $output_config)
or dodie "Failed to read $output_config";
open (IN, $output_config) or
dodie "Failed to read $output_config";
my $arch;
while (<IN>) {
if (m,Linux/(\S+)\s+\S+\s+Kernel Configuration,) {
......@@ -3657,7 +3669,7 @@ sub read_depends {
if (! -f $kconfig && $arch =~ /\d$/) {
my $orig = $arch;
# some subarchs have numbers, truncate them
$arch =~ s/\d*$//;
$kconfig = "$builddir/arch/$arch/Kconfig";
if (! -f $kconfig) {
......@@ -3706,7 +3718,6 @@ sub get_depends {
my @configs;
while ($dep =~ /[$valid]/) {
if ($dep =~ /^[^$valid]*([$valid]+)/) {
my $conf = "CONFIG_" . $1;
......@@ -3721,12 +3732,6 @@ sub get_depends {
return @configs;
}
sub test_this_config {
my ($config) = @_;
......@@ -3852,7 +3857,7 @@ sub make_min_config {
foreach my $config (@config_keys) {
my $kconfig = chomp_config $config;
if (!defined $depcount{$kconfig}) {
$depcount{$kconfig} = 0;
}
}
......@@ -3887,7 +3892,6 @@ sub make_min_config {
my $take_two = 0;
while (!$done) {
my $config;
my $found;
......@@ -3898,7 +3902,7 @@ sub make_min_config {
# Sort keys by who is most dependent on
@test_configs = sort { $depcount{chomp_config($b)} <=> $depcount{chomp_config($a)} }
@test_configs ;
# Put configs that did not modify the config at the end.
my $reset = 1;
......@@ -3954,13 +3958,13 @@ sub make_min_config {
my $failed = 0;
build "oldconfig" or $failed = 1;
if (!$failed) {
start_monitor_and_install or $failed = 1;
if ($type eq "test" && !$failed) {
do_run_test or $failed = 1;
}
end_monitor;
}
$in_bisect = 0;
......@@ -3974,8 +3978,8 @@ sub make_min_config {
# update new ignore configs
if (defined($ignore_config)) {
open (OUT, ">$temp_config")
or dodie "Can't write to $temp_config";
open (OUT, ">$temp_config") or
dodie "Can't write to $temp_config";
foreach my $config (keys %save_configs) {
print OUT "$save_configs{$config}\n";
}
......@@ -4002,8 +4006,8 @@ sub make_min_config {
}
# Save off all the current mandatory configs
open (OUT, ">$temp_config")
or dodie "Can't write to $temp_config";
open (OUT, ">$temp_config") or
dodie "Can't write to $temp_config";
foreach my $config (keys %keep_configs) {
print OUT "$keep_configs{$config}\n";
}
......@@ -4041,7 +4045,6 @@ sub make_warnings_file {
open(IN, $buildlog) or dodie "Can't open $buildlog";
while (<IN>) {
# Some compilers use UTF-8 extended for quotes
# for distcc heterogeneous systems, this causes issues
s/$utf8_quote/'/g;
......@@ -4057,98 +4060,6 @@ sub make_warnings_file {
success $i;
}
sub option_defined {
my ($option) = @_;
......@@ -4261,7 +4172,6 @@ sub do_send_mail {
}
sub send_email {
if (defined($mailto)) {
if (!defined($mailer)) {
doprint "No email sent: email or mailer not specified in config.\n";
......@@ -4274,12 +4184,103 @@ sub send_email {
sub cancel_test {
if ($email_when_canceled) {
my $name = get_test_name;
send_email("KTEST: Your [$name] test was cancelled",
"Your test started at $script_start_time was cancelled: sig int");
send_email("KTEST: Your [$name] test was cancelled",
"Your test started at $script_start_time was cancelled: sig int");
}
die "\nCaught Sig Int, test interrupted: $!\n"
}
$#ARGV < 1 or die "ktest.pl version: $VERSION\n usage: ktest.pl [config-file]\n";
if ($#ARGV == 0) {
$ktest_config = $ARGV[0];
if (! -f $ktest_config) {
print "$ktest_config does not exist.\n";
if (!read_yn "Create it?") {
exit 0;
}
}
}
if (! -f $ktest_config) {
$newconfig = 1;
get_test_case;
open(OUT, ">$ktest_config") or die "Can not create $ktest_config";
print OUT << "EOF"
# Generated by ktest.pl
#
# PWD is a ktest.pl variable that will result in the process working
# directory that ktest.pl is executed in.
# THIS_DIR is automatically assigned the PWD of the path that generated
# the config file. It is best to use this variable when assigning other
# directory paths within this directory. This allows you to easily
# move the test cases to other locations or to other machines.
#
THIS_DIR := $variable{"PWD"}
# Define each test with TEST_START
# The config options below it will override the defaults
TEST_START
TEST_TYPE = $default{"TEST_TYPE"}
DEFAULTS
EOF
;
close(OUT);
}
read_config $ktest_config;
if (defined($opt{"LOG_FILE"})) {
$opt{"LOG_FILE"} = eval_option("LOG_FILE", $opt{"LOG_FILE"}, -1);
}
# Append any configs entered in manually to the config file.
my @new_configs = keys %entered_configs;
if ($#new_configs >= 0) {
print "\nAppending entered in configs to $ktest_config\n";
open(OUT, ">>$ktest_config") or die "Can not append to $ktest_config";
foreach my $config (@new_configs) {
print OUT "$config = $entered_configs{$config}\n";
$opt{$config} = process_variables($entered_configs{$config});
}
}
if (defined($opt{"LOG_FILE"})) {
if ($opt{"CLEAR_LOG"}) {
unlink $opt{"LOG_FILE"};
}
open(LOG, ">> $opt{LOG_FILE}") or die "Can't write to $opt{LOG_FILE}";
LOG->autoflush(1);
}
doprint "\n\nSTARTING AUTOMATED TESTS\n\n";
for (my $i = 0, my $repeat = 1; $i <= $opt{"NUM_TESTS"}; $i += $repeat) {
if (!$i) {
doprint "DEFAULT OPTIONS:\n";
} else {
doprint "\nTEST $i OPTIONS";
if (defined($repeat_tests{$i})) {
$repeat = $repeat_tests{$i};
doprint " ITERATE $repeat";
}
doprint "\n";
}
foreach my $option (sort keys %opt) {
if ($option =~ /\[(\d+)\]$/) {
next if ($i != $1);
} else {
next if ($i);
}
doprint "$option = $opt{$option}\n";
}
}
$SIG{INT} = qw(cancel_test);
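# (Illustrative note: this installs cancel_test() above as the SIGINT handler,
# so an interrupt sends the cancellation email, when configured, before the
# script dies.)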
# The first thing we need to do is the builds
......@@ -4323,15 +4324,15 @@ for (my $i = 1; $i <= $opt{"NUM_TESTS"}; $i++) {
# The first test may override the PRE_KTEST option
if ($i == 1) {
if (defined($pre_ktest)) {
doprint "\n";
run_command $pre_ktest;
}
if ($email_when_started) {
my $name = get_test_name;
send_email("KTEST: Your [$name] test was started",
"Your test was started on $script_start_time");
}
}
# Any test can override the POST_KTEST option
......@@ -4409,7 +4410,7 @@ for (my $i = 1; $i <= $opt{"NUM_TESTS"}; $i++) {
my $ret = run_command $pre_test;
if (!$ret && defined($pre_test_die) &&
$pre_test_die) {
dodie "failed to pre_test\n";
dodie "failed to pre_test\n";
}
}
......@@ -4503,12 +4504,11 @@ if ($opt{"POWEROFF_ON_SUCCESS"}) {
run_command $switch_to_good;
}
doprint "\n $successes of $opt{NUM_TESTS} tests were successful\n\n";
if ($email_when_finished) {
send_email("KTEST: Your test has finished!",
"$successes of $opt{NUM_TESTS} tests started at $script_start_time were successful!");
"$successes of $opt{NUM_TESTS} tests started at $script_start_time were successful!");
}
if (defined($opt{"LOG_FILE"})) {
......@@ -4517,3 +4517,12 @@ if (defined($opt{"LOG_FILE"})) {
}
exit 0;
##
# The following are here to standardize tabs/spaces/etc across the most likely editors
###
# Local Variables:
# mode: perl
# End:
# vim: softtabstop=4