author     Linus Torvalds <torvalds@linux-foundation.org>  2014-10-12 07:33:37 -0400
committer  Linus Torvalds <torvalds@linux-foundation.org>  2014-10-12 07:33:37 -0400
commit     90eac7eee2f4257644dcfb9d22348fded7c24afd (patch)
tree       ea472521485299a830095ea341f263cfb0425f1e /tools/testing
parent     6bbcb1d3a2e0a31593e3b7d9bfd112fa7f447141 (diff)
parent     89c5497d1f933af56dac617f0fd86150942a7fb6 (diff)
Merge tag 'ftracetest-3.18' of git://git.kernel.org/pub/scm/linux/kernel/git/rostedt/linux-trace
Pull ftrace test code from Steven Rostedt:
 "This patch series starts a new selftests section in the
  tools/testing/selftests directory called "ftrace" that holds tests
  aimed at testing ftrace and subsystems that use ftrace (like kprobes).

  So far only a few tests were written (by Masami Hiramatsu), but more
  will be added in the near future (3.19)"

* tag 'ftracetest-3.18' of git://git.kernel.org/pub/scm/linux/kernel/git/rostedt/linux-trace:
  tracing/kprobes: Add selftest scripts testing kprobe-tracer as startup test
  ftracetest: Add POSIX.3 standard and XFAIL result codes
  ftracetest: Add kprobe basic testcases
  ftracetest: Add ftrace basic testcases
  ftracetest: Initial commit for ftracetest
Diffstat (limited to 'tools/testing')
-rw-r--r--  tools/testing/selftests/Makefile                                 |   1
-rw-r--r--  tools/testing/selftests/ftrace/Makefile                          |   7
-rw-r--r--  tools/testing/selftests/ftrace/README                            |  82
-rwxr-xr-x  tools/testing/selftests/ftrace/ftracetest                        | 253
-rw-r--r--  tools/testing/selftests/ftrace/samples/fail.tc                   |   4
-rw-r--r--  tools/testing/selftests/ftrace/samples/pass.tc                   |   3
-rw-r--r--  tools/testing/selftests/ftrace/samples/unresolved.tc             |   4
-rw-r--r--  tools/testing/selftests/ftrace/samples/unsupported.tc            |   3
-rw-r--r--  tools/testing/selftests/ftrace/samples/untested.tc               |   3
-rw-r--r--  tools/testing/selftests/ftrace/samples/xfail.tc                  |   3
-rw-r--r--  tools/testing/selftests/ftrace/test.d/00basic/basic1.tc          |   3
-rw-r--r--  tools/testing/selftests/ftrace/test.d/00basic/basic2.tc          |   7
-rw-r--r--  tools/testing/selftests/ftrace/test.d/00basic/basic3.tc          |   8
-rw-r--r--  tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc   |  11
-rw-r--r--  tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc       |  13
-rw-r--r--  tools/testing/selftests/ftrace/test.d/kprobe/kprobe_args.tc      |  16
-rw-r--r--  tools/testing/selftests/ftrace/test.d/kprobe/kretprobe_args.tc   |  15
-rw-r--r--  tools/testing/selftests/ftrace/test.d/template                   |   9
18 files changed, 445 insertions, 0 deletions
diff --git a/tools/testing/selftests/Makefile b/tools/testing/selftests/Makefile
index 36ff2e4c7b6..45f145c6f84 100644
--- a/tools/testing/selftests/Makefile
+++ b/tools/testing/selftests/Makefile
@@ -14,6 +14,7 @@ TARGETS += powerpc
 TARGETS += user
 TARGETS += sysctl
 TARGETS += firmware
+TARGETS += ftrace
 
 TARGETS_HOTPLUG = cpu-hotplug
 TARGETS_HOTPLUG += memory-hotplug
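The hunk above only registers the new "ftrace" directory with the kselftest build. Assuming the top-level selftests Makefile honours a TARGETS override on the command line (that Makefile is not part of this diff), the new tests can be run on their own roughly like this:

  # Sketch: run only the ftrace selftests through the kselftest harness.
  # Run from the top of the kernel source tree, as root.
  make -C tools/testing/selftests TARGETS=ftrace run_tests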
diff --git a/tools/testing/selftests/ftrace/Makefile b/tools/testing/selftests/ftrace/Makefile
new file mode 100644
index 00000000000..76cc9f15626
--- /dev/null
+++ b/tools/testing/selftests/ftrace/Makefile
@@ -0,0 +1,7 @@
+all:
+
+run_tests:
+	@/bin/sh ./ftracetest || echo "ftrace selftests: [FAIL]"
+
+clean:
+	rm -rf logs/*
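The run_tests target is a thin wrapper around the ftracetest script added below, and clean just removes the accumulated logs. Invoking the script by hand is equivalent; a minimal sketch:

  # Roughly what "make run_tests" and "make clean" do in this directory
  cd tools/testing/selftests/ftrace
  /bin/sh ./ftracetest || echo "ftrace selftests: [FAIL]"
  rm -rf logs/*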
diff --git a/tools/testing/selftests/ftrace/README b/tools/testing/selftests/ftrace/README
new file mode 100644
index 00000000000..182e76fa4b8
--- /dev/null
+++ b/tools/testing/selftests/ftrace/README
@@ -0,0 +1,82 @@
+Linux Ftrace Testcases
+
+This is a collection of testcases for the ftrace tracing feature in the
+Linux kernel. Since ftrace exports its interfaces via debugfs, shell
+scripts are all that is needed for testing. Feel free to add new testcases.
+
+Running the ftrace testcases
+============================
+
+First of all, you need root privileges to run this script.
+To run all testcases:
+
+ $ sudo ./ftracetest
+
+To run specific testcases:
+
+ # ./ftracetest test.d/basic3.tc
+
+Or you can run all testcases under a given directory:
+
+ # ./ftracetest test.d/kprobe/
+
+Contributing new testcases
+==========================
+
+Copy test.d/template to your testcase (whose filename must have the .tc
+extension) and rewrite the test description line.
+
+ * The working directory of the script is <debugfs>/tracing/.
+
+ * Take care with side effects as the tests are run with root privilege.
+
+ * The tests should not run for a long period of time (more than 1 min.);
+   these are meant to be unit tests.
+
+ * You can add a directory for your testcases under test.d/ if needed.
+
+ * The test cases should run on dash (busybox shell) for testing on
+ minimal cross-build environments.
+
+ * Note that the tests are run with the "set -e" (errexit) option. If any
+   command fails, the test is terminated immediately.
+
+ * The tests can report result codes other than pass or fail by calling
+   exit_unresolved, exit_untested, exit_unsupported or exit_xfail.
+
+Result code
+===========
+
+Ftracetest supports the following result codes.
+
+ * PASS: The test succeeded as expected. A test that exits with 0 is
+   counted as a passed test.
+
+ * FAIL: The test failed, but was expected to succeed. A test that exits
+   with a non-zero status is counted as a failed test.
+
+ * UNRESOLVED: The test produced unclear or intermediate results,
+   for example because the test was interrupted,
+   because it depends on a previous test which failed,
+   or because it was set up incorrectly.
+   A test in any of these situations must call exit_unresolved.
+
+ * UNTESTED: The test was not run; it is currently just a placeholder.
+   In this case, the test must call exit_untested.
+
+ * UNSUPPORTED: The test failed because a required feature is not available.
+   In this case, the test must call exit_unsupported.
+
+ * XFAIL: The test failed, and was expected to fail.
+   To return XFAIL, call exit_xfail from the test.
+
+There are sample test scripts for each result code under samples/.
+You can run the samples as below:
+
+ # ./ftracetest samples/
+
+TODO
+====
+
+ * Fancy colored output :)
+
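To make the contribution rules above concrete, here is a minimal sketch of a new testcase. The file name (say, test.d/00basic/tracing_on.tc) and the tracing_on check are illustrative only and are not part of this commit; the script assumes the working directory is <debugfs>/tracing and that errexit is in effect, as described in the README:

  #!/bin/sh
  # description: Toggle the tracing_on switch (illustrative example, not in this commit)
  [ -f tracing_on ] || exit_unsupported  # knob not present on this kernel
  saved=`cat tracing_on`                 # remember the original state
  echo 0 > tracing_on
  test `cat tracing_on` = "0"            # errexit aborts the test if this fails
  echo 1 > tracing_on
  test `cat tracing_on` = "1"
  echo $saved > tracing_on               # restore the original state
  exit 0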
diff --git a/tools/testing/selftests/ftrace/ftracetest b/tools/testing/selftests/ftrace/ftracetest
new file mode 100755
index 00000000000..a8f81c78285
--- /dev/null
+++ b/tools/testing/selftests/ftrace/ftracetest
@@ -0,0 +1,253 @@
+#!/bin/sh
+
+# ftracetest - Ftrace test shell scripts
+#
+# Copyright (C) Hitachi Ltd., 2014
+# Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
+#
+# Released under the terms of the GPL v2.
+
+usage() { # errno [message]
+[ "$2" ] && echo $2
+echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
+echo " Options:"
+echo " -h|--help Show help message"
+echo " -k|--keep Keep passed test logs"
+echo " -d|--debug Debug mode (trace all shell commands)"
+exit $1
+}
+
+errexit() { # message
+ echo "Error: $1" 1>&2
+ exit 1
+}
+
+# Ensuring user privilege
+if [ `id -u` -ne 0 ]; then
+ errexit "this must be run by root user"
+fi
+
+# Utilities
+absdir() { # file_path
+ (cd `dirname $1`; pwd)
+}
+
+abspath() {
+ echo `absdir $1`/`basename $1`
+}
+
+find_testcases() { #directory
+ echo `find $1 -name \*.tc`
+}
+
+parse_opts() { # opts
+ local OPT_TEST_CASES=
+ local OPT_TEST_DIR=
+
+ while [ "$1" ]; do
+ case "$1" in
+ --help|-h)
+ usage 0
+ ;;
+ --keep|-k)
+ KEEP_LOG=1
+ shift 1
+ ;;
+ --debug|-d)
+ DEBUG=1
+ shift 1
+ ;;
+ *.tc)
+ if [ -f "$1" ]; then
+ OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
+ shift 1
+ else
+ usage 1 "$1 is not a testcase"
+ fi
+ ;;
+ *)
+ if [ -d "$1" ]; then
+ OPT_TEST_DIR=`abspath $1`
+ OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
+ shift 1
+ else
+ usage 1 "Invalid option ($1)"
+ fi
+ ;;
+ esac
+ done
+ if [ "$OPT_TEST_CASES" ]; then
+ TEST_CASES=$OPT_TEST_CASES
+ fi
+}
+
+# Parameters
+DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' '`
+TRACING_DIR=$DEBUGFS_DIR/tracing
+TOP_DIR=`absdir $0`
+TEST_DIR=$TOP_DIR/test.d
+TEST_CASES=`find_testcases $TEST_DIR`
+LOG_DIR=$TOP_DIR/logs/`date +%Y%m%d-%H%M%S`/
+KEEP_LOG=0
+DEBUG=0
+# Parse command-line options
+parse_opts $*
+
+[ $DEBUG -ne 0 ] && set -x
+
+# Verify parameters
+if [ -z "$DEBUGFS_DIR" -o ! -d "$TRACING_DIR" ]; then
+ errexit "No ftrace directory found"
+fi
+
+# Preparing logs
+LOG_FILE=$LOG_DIR/ftracetest.log
+mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
+date > $LOG_FILE
+prlog() { # messages
+ echo "$@" | tee -a $LOG_FILE
+}
+catlog() { #file
+ cat $1 | tee -a $LOG_FILE
+}
+prlog "=== Ftrace unit tests ==="
+
+
+# Testcase management
+# Test result codes - Dejagnu extended code
+PASS=0 # The test succeeded.
+FAIL=1 # The test failed, but was expected to succeed.
+UNRESOLVED=2 # The test produced indeterminate results. (e.g. interrupted)
+UNTESTED=3 # The test was not run, currently just a placeholder.
+UNSUPPORTED=4 # The test failed because of lack of feature.
+XFAIL=5 # The test failed, and was expected to fail.
+
+# Accumulations
+PASSED_CASES=
+FAILED_CASES=
+UNRESOLVED_CASES=
+UNTESTED_CASES=
+UNSUPPORTED_CASES=
+XFAILED_CASES=
+UNDEFINED_CASES=
+TOTAL_RESULT=0
+
+CASENO=0
+testcase() { # testfile
+ CASENO=$((CASENO+1))
+ prlog -n "[$CASENO]"`grep "^#[ \t]*description:" $1 | cut -f2 -d:`
+}
+
+eval_result() { # retval sigval
+ local retval=$2
+ if [ $2 -eq 0 ]; then
+ test $1 -ne 0 && retval=$FAIL
+ fi
+ case $retval in
+ $PASS)
+ prlog " [PASS]"
+ PASSED_CASES="$PASSED_CASES $CASENO"
+ return 0
+ ;;
+ $FAIL)
+ prlog " [FAIL]"
+ FAILED_CASES="$FAILED_CASES $CASENO"
+ return 1 # this is a bug.
+ ;;
+ $UNRESOLVED)
+ prlog " [UNRESOLVED]"
+ UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
+ return 1 # this is a kind of bug.. something happened.
+ ;;
+ $UNTESTED)
+ prlog " [UNTESTED]"
+ UNTESTED_CASES="$UNTESTED_CASES $CASENO"
+ return 0
+ ;;
+ $UNSUPPORTED)
+ prlog " [UNSUPPORTED]"
+ UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
+ return 1 # this is not a bug, but the result should be reported.
+ ;;
+ $XFAIL)
+ prlog " [XFAIL]"
+ XFAILED_CASES="$XFAILED_CASES $CASENO"
+ return 0
+ ;;
+ *)
+ prlog " [UNDEFINED]"
+ UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
+ return 1 # this must be a test bug
+ ;;
+ esac
+}
+
+# Signal handling for result codes
+SIG_RESULT=
+SIG_BASE=36 # Use realtime signals
+SIG_PID=$$
+
+SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
+exit_unresolved () {
+ kill -s $SIG_UNRESOLVED $SIG_PID
+ exit 0
+}
+trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED
+
+SIG_UNTESTED=$((SIG_BASE + UNTESTED))
+exit_untested () {
+ kill -s $SIG_UNTESTED $SIG_PID
+ exit 0
+}
+trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED
+
+SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
+exit_unsupported () {
+ kill -s $SIG_UNSUPPORTED $SIG_PID
+ exit 0
+}
+trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED
+
+SIG_XFAIL=$((SIG_BASE + XFAIL))
+exit_xfail () {
+ kill -s $SIG_XFAIL $SIG_PID
+ exit 0
+}
+trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
+
+# Run one test case
+run_test() { # testfile
+ local testname=`basename $1`
+ local testlog=`mktemp --tmpdir=$LOG_DIR ${testname}-XXXXXX.log`
+ testcase $1
+ echo "execute: "$1 > $testlog
+ SIG_RESULT=0
+ # setup PID and PPID, $$ is not updated.
+ (cd $TRACING_DIR; read PID _ < /proc/self/stat ;
+ set -e; set -x; . $1) >> $testlog 2>&1
+ eval_result $? $SIG_RESULT
+ if [ $? -eq 0 ]; then
+ # Remove test log if the test was done as it was expected.
+ [ $KEEP_LOG -eq 0 ] && rm $testlog
+ else
+ catlog $testlog
+ TOTAL_RESULT=1
+ fi
+}
+
+# Main loop
+for t in $TEST_CASES; do
+ run_test $t
+done
+
+prlog ""
+prlog "# of passed: " `echo $PASSED_CASES | wc -w`
+prlog "# of failed: " `echo $FAILED_CASES | wc -w`
+prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
+prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
+prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
+prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
+prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`
+
+# if no error, return 0
+exit $TOTAL_RESULT
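The exit_unresolved/exit_untested/exit_unsupported/exit_xfail helpers above report results by signalling the top-level shell: each testcase is sourced in a subshell, where $$ still expands to the parent's PID, so the kill reaches the ftracetest process and its trap records the special result before eval_result inspects the subshell's exit status. A stripped-down sketch of that pattern, with names invented for illustration:

  #!/bin/sh
  # Minimal sketch of the signal-based result reporting used by ftracetest.
  RESULT=
  trap 'RESULT=unsupported' USR1    # the parent shell records the special result

  report_unsupported() {            # plays the role of exit_unsupported
    kill -s USR1 $$                 # $$ is the parent's PID even inside a subshell
    exit 0
  }

  ( report_unsupported )            # the "testcase" runs in a subshell
  status=$?
  echo "exit status: $status, recorded result: ${RESULT:-none}"

ftracetest itself uses real-time signals starting at SIG_BASE=36 rather than SIGUSR1, so that each result code gets its own signal number.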
diff --git a/tools/testing/selftests/ftrace/samples/fail.tc b/tools/testing/selftests/ftrace/samples/fail.tc
new file mode 100644
index 00000000000..15e35b956e0
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/fail.tc
@@ -0,0 +1,4 @@
+#!/bin/sh
+# description: failure-case example
+cat non-exist-file
+echo "this is not executed"
diff --git a/tools/testing/selftests/ftrace/samples/pass.tc b/tools/testing/selftests/ftrace/samples/pass.tc
new file mode 100644
index 00000000000..d0154937004
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/pass.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: pass-case example
+return 0
diff --git a/tools/testing/selftests/ftrace/samples/unresolved.tc b/tools/testing/selftests/ftrace/samples/unresolved.tc
new file mode 100644
index 00000000000..41e99d3358d
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/unresolved.tc
@@ -0,0 +1,4 @@
+#!/bin/sh
+# description: unresolved-case example
+trap exit_unresolved INT
+kill -INT $PID
diff --git a/tools/testing/selftests/ftrace/samples/unsupported.tc b/tools/testing/selftests/ftrace/samples/unsupported.tc
new file mode 100644
index 00000000000..45910ff1332
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/unsupported.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: unsupported-case example
+exit_unsupported
diff --git a/tools/testing/selftests/ftrace/samples/untested.tc b/tools/testing/selftests/ftrace/samples/untested.tc
new file mode 100644
index 00000000000..35a45946ec6
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/untested.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: untested-case example
+exit_untested
diff --git a/tools/testing/selftests/ftrace/samples/xfail.tc b/tools/testing/selftests/ftrace/samples/xfail.tc
new file mode 100644
index 00000000000..9dd39532325
--- /dev/null
+++ b/tools/testing/selftests/ftrace/samples/xfail.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: xfail-case example
+cat non-exist-file || exit_xfail
diff --git a/tools/testing/selftests/ftrace/test.d/00basic/basic1.tc b/tools/testing/selftests/ftrace/test.d/00basic/basic1.tc
new file mode 100644
index 00000000000..9980ff14ae4
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/00basic/basic1.tc
@@ -0,0 +1,3 @@
+#!/bin/sh
+# description: Basic trace file check
+test -f README -a -f trace -a -f tracing_on -a -f trace_pipe
diff --git a/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc b/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc
new file mode 100644
index 00000000000..bf9a7b03792
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/00basic/basic2.tc
@@ -0,0 +1,7 @@
+#!/bin/sh
+# description: Basic test for tracers
+test -f available_tracers
+for t in `cat available_tracers`; do
+ echo $t > current_tracer
+done
+echo nop > current_tracer
diff --git a/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc b/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc
new file mode 100644
index 00000000000..bde6625d978
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/00basic/basic3.tc
@@ -0,0 +1,8 @@
+#!/bin/sh
+# description: Basic trace clock test
+test -f trace_clock
+for c in `cat trace_clock | tr -d \[\]`; do
+ echo $c > trace_clock
+ grep '\['$c'\]' trace_clock
+done
+echo local > trace_clock
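basic3.tc relies on trace_clock listing all available clocks on a single line with the currently selected one in square brackets; writing a clock name and then grepping for it in brackets confirms the switch took effect. Spelled out for one clock (a hand-run sketch, assuming the "global" clock is available and the working directory is <debugfs>/tracing):

  cat trace_clock                  # e.g. "[local] global counter ..." (clock set varies)
  echo global > trace_clock
  grep '\[global\]' trace_clock    # the selected clock is shown in brackets
  echo local > trace_clock         # restore the default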
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc b/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc
new file mode 100644
index 00000000000..1b8b665ab2b
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/add_and_remove.tc
@@ -0,0 +1,11 @@
+#!/bin/sh
+# description: Kprobe dynamic event - adding and removing
+
+[ -f kprobe_events ] || exit_unsupported # this is configurable
+
+echo 0 > events/enable
+echo > kprobe_events
+echo p:myevent do_fork > kprobe_events
+grep myevent kprobe_events
+test -d events/kprobes/myevent
+echo > kprobe_events
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc b/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc
new file mode 100644
index 00000000000..b55c8400358
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/busy_check.tc
@@ -0,0 +1,13 @@
+#!/bin/sh
+# description: Kprobe dynamic event - busy event check
+
+[ -f kprobe_events ] || exit_unsupported
+
+echo 0 > events/enable
+echo > kprobe_events
+echo p:myevent do_fork > kprobe_events
+test -d events/kprobes/myevent
+echo 1 > events/kprobes/myevent/enable
+echo > kprobe_events && exit 1 # this must fail
+echo 0 > events/kprobes/myevent/enable
+echo > kprobe_events # this must succeed
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/kprobe_args.tc b/tools/testing/selftests/ftrace/test.d/kprobe/kprobe_args.tc
new file mode 100644
index 00000000000..a603d3f8db7
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/kprobe_args.tc
@@ -0,0 +1,16 @@
+#!/bin/sh
+# description: Kprobe dynamic event with arguments
+
+[ -f kprobe_events ] || exit_unsupported # this is configurable
+
+echo 0 > events/enable
+echo > kprobe_events
+echo 'p:testprobe do_fork $stack $stack0 +0($stack)' > kprobe_events
+grep testprobe kprobe_events
+test -d events/kprobes/testprobe
+echo 1 > events/kprobes/testprobe/enable
+( echo "forked")
+echo 0 > events/kprobes/testprobe/enable
+echo "-:testprobe" >> kprobe_events
+test -d events/kprobes/testprobe && exit 1 || exit 0
+
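kprobe_args.tc only checks that a probe with $stack/$stack0 arguments can be defined, enabled, and removed; it does not look at the recorded data. As a hand-run follow-up (not part of this commit), one could inspect what such a probe records, again from <debugfs>/tracing:

  # Hypothetical manual check of a kprobe's recorded arguments
  echo 'p:testprobe do_fork $stack $stack0 +0($stack)' > kprobe_events
  echo 1 > events/kprobes/testprobe/enable
  ( echo "cause a fork" )              # any forked command hits do_fork
  echo 0 > events/kprobes/testprobe/enable
  grep testprobe trace | head -5       # show a few recorded hits
  echo '-:testprobe' >> kprobe_events  # clean up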
diff --git a/tools/testing/selftests/ftrace/test.d/kprobe/kretprobe_args.tc b/tools/testing/selftests/ftrace/test.d/kprobe/kretprobe_args.tc
new file mode 100644
index 00000000000..283c29e7f7c
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/kprobe/kretprobe_args.tc
@@ -0,0 +1,15 @@
+#!/bin/sh
+# description: Kretprobe dynamic event with arguments
+
+[ -f kprobe_events ] || exit_unsupported # this is configurable
+
+echo 0 > events/enable
+echo > kprobe_events
+echo 'r:testprobe2 do_fork $retval' > kprobe_events
+grep testprobe2 kprobe_events
+test -d events/kprobes/testprobe2
+echo 1 > events/kprobes/testprobe2/enable
+( echo "forked")
+echo 0 > events/kprobes/testprobe2/enable
+echo '-:testprobe2' >> kprobe_events
+test -d events/kprobes/testprobe2 && exit 1 || exit 0
diff --git a/tools/testing/selftests/ftrace/test.d/template b/tools/testing/selftests/ftrace/test.d/template
new file mode 100644
index 00000000000..5448f7abad5
--- /dev/null
+++ b/tools/testing/selftests/ftrace/test.d/template
@@ -0,0 +1,9 @@
+#!/bin/sh
+# description: %HERE DESCRIBE WHAT THIS DOES%
+# you have to add the ".tc" extension to your testcase file
+# Note that all tests are run with the "errexit" option.
+
+exit 0 # Return 0 if the test is passed, otherwise return !0
+# If the test could not run because of lack of feature, call exit_unsupported
+# If the test returned unclear results, call exit_unresolved
+# If the test is a dummy, or a placeholder, call exit_untested