Commit 0ef67a88 authored by Kees Cook, committed by Shuah Khan

selftests/harness: Report skip reason

Use a shared memory segment to pass the skip reason string from the forked
test back to the test runner.
Signed-off-by: Kees Cook <keescook@chromium.org>
Signed-off-by: Shuah Khan <skhan@linuxfoundation.org>
parent d088c928
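The mechanism is the usual one for handing data back from a forked child: a
MAP_SHARED | MAP_ANONYMOUS mapping created before fork() is visible to both
the parent runner and the test child, so a string written by the child can be
read by the parent after waitpid(). The standalone sketch below illustrates
that pattern only; it is not part of the patch, and struct shared_report and
the "missing CAP_SYS_ADMIN" reason are made-up stand-ins for the harness's
struct __test_results and a real test's skip message.

/*
 * Minimal standalone sketch (not from this patch) of the shared-mapping
 * pattern: the buffer survives fork(), so a string written by the child
 * is visible to the parent after waitpid(). Names are illustrative only.
 */
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

struct shared_report {			/* stand-in for struct __test_results */
	char reason[1024];
};

int main(void)
{
	struct shared_report *report;
	pid_t pid;

	/* Shared, anonymous mapping: parent and child see the same pages. */
	report = mmap(NULL, sizeof(*report), PROT_READ | PROT_WRITE,
		      MAP_SHARED | MAP_ANONYMOUS, -1, 0);
	if (report == MAP_FAILED)
		return 1;

	pid = fork();
	if (pid == 0) {
		/* Child: record why it skipped, then exit. */
		snprintf(report->reason, sizeof(report->reason),
			 "missing CAP_SYS_ADMIN");
		_exit(0);
	}

	/* Parent: reap the child, then read the reason it left behind. */
	waitpid(pid, NULL, 0);
	printf("# SKIP %s\n", report->reason[0] ? report->reason : "unknown");

	munmap(report, sizeof(*report));
	return 0;
}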
@@ -60,6 +60,7 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
+#include <sys/mman.h>
 #include <sys/types.h>
 #include <sys/wait.h>
 #include <unistd.h>
@@ -122,9 +123,11 @@
  * and runs "statement", which is usually "return" or "goto skip".
  */
 #define SKIP(statement, fmt, ...) do { \
+	snprintf(_metadata->results->reason, \
+		 sizeof(_metadata->results->reason), fmt, ##__VA_ARGS__); \
 	if (TH_LOG_ENABLED) { \
-		fprintf(TH_LOG_STREAM, "# SKIP " fmt "\n", \
-			##__VA_ARGS__); \
+		fprintf(TH_LOG_STREAM, "# SKIP %s\n", \
+			_metadata->results->reason); \
 	} \
 	_metadata->passed = 1; \
 	_metadata->skip = 1; \
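For reference, a test would typically invoke the macro as in the hypothetical
example below (the test name and the euid check are illustrative only). With
this change the formatted reason also lands in _metadata->results->reason, so
the parent runner can report it even though the test body runs in a forked
child.

/* Hypothetical test; the name and the euid check are illustrative only. */
TEST(hypothetical_needs_root)
{
	if (geteuid() != 0)
		/* Copies the reason into the shared buffer, logs it, returns. */
		SKIP(return, "test requires root (euid %d)", geteuid());

	ASSERT_EQ(0, geteuid());
}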
@@ -762,6 +765,10 @@
 	} \
 }
 
+struct __test_results {
+	char reason[1024];	/* Reason for test result */
+};
+
 struct __test_metadata;
 struct __fixture_variant_metadata;
@@ -815,6 +822,7 @@ struct __test_metadata {
 	bool timed_out;	/* did this test timeout instead of exiting? */
 	__u8 step;
 	bool no_print;	/* manual trigger when TH_LOG_STREAM is not available */
+	struct __test_results *results;
 	struct __test_metadata *prev, *next;
 };
@@ -957,6 +965,7 @@ void __run_test(struct __fixture_metadata *f,
 	t->trigger = 0;
 	t->step = 0;
 	t->no_print = 0;
+	memset(t->results->reason, 0, sizeof(t->results->reason));
 
 	ksft_print_msg(" RUN %s%s%s.%s ...\n",
 	       f->name, variant->name[0] ? "." : "", variant->name, t->name);
@@ -986,8 +995,8 @@ void __run_test(struct __fixture_metadata *f,
 	       f->name, variant->name[0] ? "." : "", variant->name, t->name);
 
 	if (t->skip)
-		ksft_test_result_skip("%s%s%s.%s\n",
-			f->name, variant->name[0] ? "." : "", variant->name, t->name);
+		ksft_test_result_skip("%s\n", t->results->reason[0] ?
+					t->results->reason : "unknown");
 	else
 		ksft_test_result(t->passed, "%s%s%s.%s\n",
 		       f->name, variant->name[0] ? "." : "", variant->name, t->name);
@@ -999,6 +1008,7 @@ static int test_harness_run(int __attribute__((unused)) argc,
 	struct __fixture_variant_metadata no_variant = { .name = "", };
 	struct __fixture_variant_metadata *v;
 	struct __fixture_metadata *f;
+	struct __test_results *results;
 	struct __test_metadata *t;
 	int ret = 0;
 	unsigned int case_count = 0, test_count = 0;
@@ -1013,6 +1023,9 @@ static int test_harness_run(int __attribute__((unused)) argc,
 		}
 	}
 
+	results = mmap(NULL, sizeof(*results), PROT_READ | PROT_WRITE,
+		       MAP_SHARED | MAP_ANONYMOUS, -1, 0);
+
 	ksft_print_header();
 	ksft_set_plan(test_count);
 	ksft_print_msg("Starting %u tests from %u test cases.\n",
@@ -1021,7 +1034,9 @@ static int test_harness_run(int __attribute__((unused)) argc,
 		for (v = f->variant ?: &no_variant; v; v = v->next) {
 			for (t = f->tests; t; t = t->next) {
 				count++;
+				t->results = results;
 				__run_test(f, v, t);
+				t->results = NULL;
 				if (t->passed)
 					pass_count++;
 				else
@@ -1029,6 +1044,8 @@ static int test_harness_run(int __attribute__((unused)) argc,
 			}
 		}
 	}
+
+	munmap(results, sizeof(*results));
 	ksft_print_msg("%s: %u / %u tests passed.\n", ret ? "FAILED" : "PASSED",
 		       pass_count, count);
 	ksft_exit(ret == 0);