1#!/bin/sh
2# SPDX-License-Identifier: GPL-2.0-only
3
4# ftracetest - Ftrace test shell scripts
5#
6# Copyright (C) Hitachi Ltd., 2014
7#  Written by Masami Hiramatsu <masami.hiramatsu.pt@hitachi.com>
8#
9
usage() { # errno [message]
  # Print an optional error message followed by the option summary,
  # then exit with the given status code.
  # "$2" is quoted so whitespace inside the message is preserved
  # (the old unquoted `echo $2` collapsed runs of spaces).
  [ -n "$2" ] && echo "$2"
  echo "Usage: ftracetest [options] [testcase(s)] [testcase-directory(s)]"
  echo " Options:"
  echo "		-h|--help  Show help message"
  echo "		-k|--keep  Keep passed test logs"
  echo "		-K|--ktap  Output in KTAP format"
  echo "		-v|--verbose Increase verbosity of test messages"
  echo "		-vv        Alias of -v -v (Show all results in stdout)"
  echo "		-vvv       Alias of -v -v -v (Show all commands immediately)"
  echo "		--fail-unsupported Treat UNSUPPORTED as a failure"
  echo "		--fail-unresolved Treat UNRESOLVED as a failure"
  echo "		-d|--debug Debug mode (trace all shell commands)"
  echo "		-l|--logdir <dir> Save logs on the <dir>"
  echo "		            If <dir> is -, all logs output in console only"
  exit $1
}
27
28# default error
29err_ret=1
30
31# kselftest skip code is 4
32err_skip=4
33
34# umount required
35UMOUNT_DIR=""
36
37# cgroup RT scheduling prevents chrt commands from succeeding, which
38# induces failures in test wakeup tests.  Disable for the duration of
39# the tests.
40
41readonly sched_rt_runtime=/proc/sys/kernel/sched_rt_runtime_us
42
43sched_rt_runtime_orig=$(cat $sched_rt_runtime)
44
setup() {
  # Lift the RT-runtime limit (-1 means unlimited) so the chrt-based
  # wakeup tests are not throttled; cleanup() restores the saved value.
  local unlimited=-1
  echo $unlimited > $sched_rt_runtime
}
48
cleanup() {
  # Restore the RT-runtime limit saved at startup.
  echo $sched_rt_runtime_orig > $sched_rt_runtime
  # Unmount tracefs/debugfs only if this script mounted it; errors are
  # ignored because there is nothing useful to do if umount fails.
  [ -z "${UMOUNT_DIR}" ] || umount ${UMOUNT_DIR} || :
}
55
errexit() { # message
  # Report the failure on stderr, undo global state, then exit with the
  # current error code (err_ret may have been raised to err_skip).
  printf 'Error: %s\n' "$1" >&2
  cleanup
  exit $err_ret
}
61
# Ensuring user privilege
# (writing tracefs control files and the RT-runtime sysctl needs root)
if [ `id -u` -ne 0 ]; then
  errexit "this must be run by root user"
fi

# Disable the RT throttling for the whole run (undone by cleanup)
setup
68
69# Utilities
absdir() { # file_path
  # Print the absolute path of the directory containing $1.
  # Expansions are quoted so paths containing spaces survive.
  (cd "$(dirname "$1")"; pwd)
}
73
abspath() {
  # Print the absolute path of file $1 (directory resolved via absdir).
  # Expansions are quoted so paths containing spaces survive.
  echo "$(absdir "$1")/$(basename "$1")"
}
77
find_testcases() { #directory
  # List all *.tc files under directory $1, sorted, on one line.
  # The unquoted $(...) inside echo deliberately collapses find's
  # newlines into single spaces so the result can be appended to the
  # space-separated $TEST_CASES list. Only $1 itself needs quoting.
  echo $(find "$1" -name \*.tc | sort)
}
81
parse_opts() { # opts
  # Parse command-line arguments: set the global option flags and
  # collect explicitly named testcases (*.tc) or test directories.
  # TEST_CASES is only overridden when at least one testcase was given.
  local OPT_TEST_CASES=
  local OPT_TEST_DIR=

  while [ -n "$1" ]; do
    case "$1" in
    --help|-h)
      usage 0
    ;;
    --keep|-k)
      KEEP_LOG=1
      shift 1
    ;;
    --ktap|-K)
      KTAP=1
      shift 1
    ;;
    --verbose|-v|-vv|-vvv)
      if [ $VERBOSE -eq -1 ]; then
        # usage's first argument is the exit status; it was missing
        # here, which made usage's "exit $1" choke on the message text.
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vv' ] && VERBOSE=$((VERBOSE + 1))
      [ "$1" = '-vvv' ] && VERBOSE=$((VERBOSE + 2))
      shift 1
    ;;
    --console)
      if [ $VERBOSE -ne 0 ]; then
        usage 1 "--console can not use with --verbose"
      fi
      VERBOSE=-1
      shift 1
    ;;
    --debug|-d)
      DEBUG=1
      shift 1
    ;;
    --stop-fail)
      STOP_FAILURE=1
      shift 1
    ;;
    --fail-unsupported)
      UNSUPPORTED_RESULT=1
      shift 1
    ;;
    --fail-unresolved)
      UNRESOLVED_RESULT=1
      shift 1
    ;;
    --logdir|-l)
      # A user-chosen log dir disables the "latest" symlink handling.
      LOG_DIR=$2
      LINK_PTR=
      shift 2
    ;;
    *.tc)
      if [ -f "$1" ]; then
        OPT_TEST_CASES="$OPT_TEST_CASES `abspath $1`"
        shift 1
      else
        usage 1 "$1 is not a testcase"
      fi
      ;;
    *)
      if [ -d "$1" ]; then
        OPT_TEST_DIR=`abspath $1`
        OPT_TEST_CASES="$OPT_TEST_CASES `find_testcases $OPT_TEST_DIR`"
        shift 1
      else
        usage 1 "Invalid option ($1)"
      fi
    ;;
    esac
  done
  if [ -n "$OPT_TEST_CASES" ]; then
    TEST_CASES=$OPT_TEST_CASES
  fi
}
159
# Parameters
# Locate an already-mounted tracefs (preferred) or debugfs instance.
# If neither is mounted, try to mount one ourselves; UMOUNT_DIR records
# a mount we created so cleanup() can undo it on exit.
TRACING_DIR=`grep tracefs /proc/mounts | cut -f2 -d' ' | head -1`
if [ -z "$TRACING_DIR" ]; then
    DEBUGFS_DIR=`grep debugfs /proc/mounts | cut -f2 -d' ' | head -1`
    if [ -z "$DEBUGFS_DIR" ]; then
	# If tracefs exists, then so does /sys/kernel/tracing
	if [ -d "/sys/kernel/tracing" ]; then
	    mount -t tracefs nodev /sys/kernel/tracing ||
	      errexit "Failed to mount /sys/kernel/tracing"
	    TRACING_DIR="/sys/kernel/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	# If debugfs exists, then so does /sys/kernel/debug
	elif [ -d "/sys/kernel/debug" ]; then
	    mount -t debugfs nodev /sys/kernel/debug ||
	      errexit "Failed to mount /sys/kernel/debug"
	    TRACING_DIR="/sys/kernel/debug/tracing"
	    UMOUNT_DIR=${TRACING_DIR}
	else
	    # Neither filesystem is available: report a kselftest SKIP,
	    # not a failure, since the kernel lacks the feature.
	    err_ret=$err_skip
	    errexit "debugfs and tracefs are not configured in this kernel"
	fi
    else
	TRACING_DIR="$DEBUGFS_DIR/tracing"
    fi
fi
# debugfs may be mounted without CONFIG_FTRACE; tracing/ then missing.
if [ ! -d "$TRACING_DIR" ]; then
    err_ret=$err_skip
    errexit "ftrace is not configured in this kernel"
fi
189
190TOP_DIR=`absdir $0`
191TEST_DIR=$TOP_DIR/test.d
192TEST_CASES=`find_testcases $TEST_DIR`
193LOG_TOP_DIR=$TOP_DIR/logs
194LOG_DATE=`date +%Y%m%d-%H%M%S`
195LOG_DIR=$LOG_TOP_DIR/$LOG_DATE/
196LINK_PTR=$LOG_TOP_DIR/latest
197KEEP_LOG=0
198KTAP=0
199DEBUG=0
200VERBOSE=0
201UNSUPPORTED_RESULT=0
202UNRESOLVED_RESULT=0
203STOP_FAILURE=0
204# Parse command-line options
205parse_opts $*
206
207[ $DEBUG -ne 0 ] && set -x
208
# Verify parameters
if [ -z "$TRACING_DIR" -o ! -d "$TRACING_DIR" ]; then
  errexit "No ftrace directory found"
fi

# Preparing logs
# "-l -" means console-only: leave LOG_FILE empty so prlog/catlog skip
# the file branch; otherwise create the log dir and refresh the
# "latest" symlink (only when the user did not override --logdir).
if [ "x$LOG_DIR" = "x-" ]; then
  LOG_FILE=
  date
else
  LOG_FILE=$LOG_DIR/ftracetest.log
  mkdir -p $LOG_DIR || errexit "Failed to make a log directory: $LOG_DIR"
  date > $LOG_FILE
  if [ "x-$LINK_PTR" != "x-" ]; then
    # NOTE(review): unlink prints an error on the very first run when
    # "latest" does not exist yet; harmless, but could be guarded.
    unlink $LINK_PTR
    ln -fs $LOG_DATE $LINK_PTR
  fi
fi
227
# Define text colors
# Check available colors on the terminal, if any
ncolors=`tput colors 2>/dev/null || echo 0`
color_reset=
color_red=
color_green=
color_blue=
# If stdout is a terminal and it supports eight or more colors, enable
# them. Two [ ] tests joined with && replace the obsolescent and
# ambiguous "-a" operator inside a single test invocation.
# The escapes are stored literally; prlog's printf expands them.
if [ -t 1 ] && [ "$ncolors" -ge 8 ]; then
  color_reset="\033[0m"
  color_red="\033[31m"
  color_green="\033[32m"
  color_blue="\033[34m"
fi
242
strip_esc() {
  # Filter: drop ANSI color/erase escape sequences from stdin so the
  # copy written to the log file stays plain text.
  # busybox sed implementation doesn't accept "\x1B", so use [:cntrl:] instead.
  local esc_re="s/[[:cntrl:]]\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"
  sed -E "$esc_re"
}
247
prlog() { # messages
  # Print messages to stdout (suppressed in KTAP mode) and append a
  # color-stripped copy to $LOG_FILE when file logging is enabled.
  # A leading "-n" suppresses the trailing newline.
  # '%b' expands backslash escapes in the argument (the literal \033
  # color codes and the \n below) exactly as the old message-as-format
  # printf did, but keeps '%' characters in messages literal instead of
  # misparsing them as conversion specifiers.
  newline="\n"
  if [ "$1" = "-n" ] ; then
    newline=
    shift
  fi
  [ "$KTAP" != "1" ] && printf '%b' "$*$newline"
  [ "$LOG_FILE" ] && printf '%b' "$*$newline" | strip_esc >> $LOG_FILE
}
catlog() { #file
  # Emit a test log file to stdout. In KTAP mode each line is prefixed
  # with "# " so it becomes a KTAP diagnostic line. A color-stripped
  # copy is appended to $LOG_FILE when file logging is enabled.
  if [ "${KTAP}" = "1" ]; then
    # IFS= and -r preserve leading whitespace and backslashes in log
    # lines; reading straight from the file avoids a needless cat pipe.
    while IFS= read -r line ; do
      echo "# $line"
    done < "$1"
  else
    cat "$1"
  fi
  [ "$LOG_FILE" ] && strip_esc < "$1" >> $LOG_FILE
}
267prlog "=== Ftrace unit tests ==="
268
269
270# Testcase management
271# Test result codes - Dejagnu extended code
272PASS=0	# The test succeeded.
273FAIL=1	# The test failed, but was expected to succeed.
274UNRESOLVED=2  # The test produced indeterminate results. (e.g. interrupted)
275UNTESTED=3    # The test was not run, currently just a placeholder.
276UNSUPPORTED=4 # The test failed because of lack of feature.
277XFAIL=5	# The test failed, and was expected to fail.
278
279# Accumulations
280PASSED_CASES=
281FAILED_CASES=
282UNRESOLVED_CASES=
283UNTESTED_CASES=
284UNSUPPORTED_CASES=
285XFAILED_CASES=
286UNDEFINED_CASES=
287TOTAL_RESULT=0
288
289INSTANCE=
290CASENO=0
291CASENAME=
292
testcase() { # testfile
  # Advance the global case counter and pull the human-readable name
  # from the testcase's "# description:" header line.
  CASENO=$((CASENO+1))
  CASENAME=$(grep "^#[ \t]*description:" $1 | cut -f2- -d:)
}
297
checkreq() { # testfile
  # Extract the "# requires:" header from the testcase and hand the
  # patterns to check_requires (presumably provided by the sourced
  # test.d/functions helpers — it is not defined in this file).
  requires=`grep "^#[ \t]*requires:" $1 | cut -f2- -d:`
  # Use eval to pass quoted-patterns correctly.
  eval check_requires "$requires"
}
303
test_on_instance() { # testfile
  # Succeed iff the testcase's "# flags:" header contains "instance",
  # i.e. it should also be run inside a separate tracing instance.
  # "$1" is quoted so unusual testcase paths do not word-split.
  grep -q "^#[ \t]*flags:.*instance" "$1"
}
307
ktaptest() { # result comment
  # Emit one KTAP result line ("ok"/"not ok") for the current case.
  # No-op unless KTAP output was requested.
  [ "$KTAP" = "1" ] || return 0

  local verdict="not ok"
  [ "$1" = "1" ] && verdict="ok"
  shift

  # Remaining arguments become a "# ..." trailer (e.g. SKIP, XFAIL).
  local note="$*"
  [ "$note" != "" ] && note="# $note"

  echo $verdict $CASENO $INSTANCE$CASENAME $note
}
328
eval_result() { # sigval
  # Map a result code to console/KTAP output, record the case number in
  # the matching *_CASES list, and return 0 if the result counts as
  # success or non-zero if it should fail the whole run.  UNRESOLVED
  # and UNSUPPORTED defer to the --fail-* option flags.
  case $1 in
    $PASS)
      prlog "	[${color_green}PASS${color_reset}]"
      ktaptest 1
      PASSED_CASES="$PASSED_CASES $CASENO"
      return 0
    ;;
    $FAIL)
      prlog "	[${color_red}FAIL${color_reset}]"
      ktaptest 0
      FAILED_CASES="$FAILED_CASES $CASENO"
      return 1 # this is a bug.
    ;;
    $UNRESOLVED)
      prlog "	[${color_blue}UNRESOLVED${color_reset}]"
      ktaptest 0 UNRESOLVED
      UNRESOLVED_CASES="$UNRESOLVED_CASES $CASENO"
      return $UNRESOLVED_RESULT # depends on use case
    ;;
    $UNTESTED)
      prlog "	[${color_blue}UNTESTED${color_reset}]"
      ktaptest 1 SKIP
      UNTESTED_CASES="$UNTESTED_CASES $CASENO"
      return 0
    ;;
    $UNSUPPORTED)
      prlog "	[${color_blue}UNSUPPORTED${color_reset}]"
      ktaptest 1 SKIP
      UNSUPPORTED_CASES="$UNSUPPORTED_CASES $CASENO"
      return $UNSUPPORTED_RESULT # depends on use case
    ;;
    $XFAIL)
      prlog "	[${color_green}XFAIL${color_reset}]"
      ktaptest 1 XFAIL
      XFAILED_CASES="$XFAILED_CASES $CASENO"
      return 0
    ;;
    *)
      # Unknown code: the testcase signalled something outside the
      # Dejagnu set, which indicates a bug in the test itself.
      prlog "	[${color_blue}UNDEFINED${color_reset}]"
      ktaptest 0 error
      UNDEFINED_CASES="$UNDEFINED_CASES $CASENO"
      return 1 # this must be a test bug
    ;;
  esac
}
375
# Signal handling for result codes
# Testcases run in a subshell (see __run_test); they report a non-pass
# result by sending SIG_BASE+<result code> to this top-level shell,
# whose traps below record the code in SIG_RESULT.
SIG_RESULT=
SIG_BASE=36	# Use realtime signals
SIG_PID=$$

# PASS is the default (SIG_RESULT stays 0), so exit_pass sends no signal.
exit_pass () {
  exit 0
}

# exit_fail sends no signal either: __run_test signals SIG_FAIL itself
# on any non-zero exit status from the testcase subshell.
SIG_FAIL=$((SIG_BASE + FAIL))
exit_fail () {
  exit 1
}
trap 'SIG_RESULT=$FAIL' $SIG_FAIL

SIG_UNRESOLVED=$((SIG_BASE + UNRESOLVED))
exit_unresolved () {
  kill -s $SIG_UNRESOLVED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNRESOLVED' $SIG_UNRESOLVED

SIG_UNTESTED=$((SIG_BASE + UNTESTED))
exit_untested () {
  kill -s $SIG_UNTESTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNTESTED' $SIG_UNTESTED

SIG_UNSUPPORTED=$((SIG_BASE + UNSUPPORTED))
exit_unsupported () {
  kill -s $SIG_UNSUPPORTED $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$UNSUPPORTED' $SIG_UNSUPPORTED

SIG_XFAIL=$((SIG_BASE + XFAIL))
exit_xfail () {
  kill -s $SIG_XFAIL $SIG_PID
  exit 0
}
trap 'SIG_RESULT=$XFAIL' $SIG_XFAIL
418
__run_test() { # testfile
  # Run one testcase in a subshell: cd into the tracing directory,
  # abort on the first error (-e), trace commands (-x), verify the
  # testcase's requirements, reset ftrace state (initialize_ftrace
  # comes from the sourced test.d/functions), then source the test.
  # setup PID and PPID, $$ is not updated.
  (cd $TRACING_DIR; read PID _ < /proc/self/stat; set -e; set -x;
   checkreq $1; initialize_ftrace; . $1)
  # Any non-zero subshell status not already reported through a result
  # signal (exit_unresolved etc.) is recorded as a FAIL.
  [ $? -ne 0 ] && kill -s $SIG_FAIL $SIG_PID
}
425
# Run one test case
run_test() { # testfile
  local testname=`basename $1`
  testcase $1
  prlog -n "[$CASENO]$INSTANCE$CASENAME"
  # Per-case log: a unique file under $LOG_DIR, or stdout passthrough
  # when file logging is disabled (-l -).
  if [ ! -z "$LOG_FILE" ] ; then
    local testlog=`mktemp $LOG_DIR/${CASENO}-${testname}-log.XXXXXX`
  else
    local testlog=/proc/self/fd/1
  fi
  # Give each testcase a private scratch dir, removed below.
  export TMPDIR=`mktemp -d /tmp/ftracetest-dir.XXXXXX`
  export FTRACETEST_ROOT=$TOP_DIR
  echo "execute$INSTANCE: "$1 > $testlog
  SIG_RESULT=0
  # Route the test's output according to the verbosity level:
  # --console (-1) runs directly on the terminal; no LOG_FILE means no
  # capture; -vvv tees everything; -vv tees stdout and appends stderr;
  # default captures everything into the per-case log.
  if [ $VERBOSE -eq -1 ]; then
    __run_test $1
  elif [ -z "$LOG_FILE" ]; then
    __run_test $1 2>&1
  elif [ $VERBOSE -ge 3 ]; then
    # NOTE(review): "2>&1" here binds to tee, not to __run_test, so the
    # test's own stderr still goes to the console — confirm intended.
    __run_test $1 | tee -a $testlog 2>&1
  elif [ $VERBOSE -eq 2 ]; then
    __run_test $1 2>> $testlog | tee -a $testlog
  else
    __run_test $1 >> $testlog 2>&1
  fi
  # SIG_RESULT was set by the signal traps (or left 0 = PASS).
  eval_result $SIG_RESULT
  if [ $? -eq 0 ]; then
    # Remove test log if the test was done as it was expected.
    [ $KEEP_LOG -eq 0 -a ! -z "$LOG_FILE" ] && rm $testlog
  else
    [ $VERBOSE -eq 1 -o $VERBOSE -eq 2 ] && catlog $testlog
    TOTAL_RESULT=1
  fi
  rm -rf $TMPDIR
}
461
# load in the helper functions
. $TEST_DIR/functions

# KTAP requires the plan up front: every selected case runs once, plus
# once more for each case that also runs in a separate tracing instance.
if [ "$KTAP" = "1" ]; then
  echo "TAP version 13"

  casecount=`echo $TEST_CASES | wc -w`
  for t in $TEST_CASES; do
    test_on_instance $t || continue
    casecount=$((casecount+1))
  done
  echo "1..${casecount}"
fi
475
# Main loop: run every selected testcase in the top-level tracing dir.
for t in $TEST_CASES; do
  run_test $t
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done

# Test on instance loop: re-run instance-flagged cases inside a fresh
# tracing instance directory, which is removed again afterwards.
INSTANCE=" (instance) "
for t in $TEST_CASES; do
  test_on_instance $t || continue
  SAVED_TRACING_DIR=$TRACING_DIR
  export TRACING_DIR=`mktemp -d $TRACING_DIR/instances/ftracetest.XXXXXX`
  run_test $t
  rmdir $TRACING_DIR
  TRACING_DIR=$SAVED_TRACING_DIR
  if [ $STOP_FAILURE -ne 0 -a $TOTAL_RESULT -ne 0 ]; then
    echo "A failure detected. Stop test."
    exit 1
  fi
done
(cd $TRACING_DIR; finish_ftrace) # for cleanup
500
# Per-result summary: each *_CASES list holds space-separated case
# numbers, so wc -w yields the count.
prlog ""
prlog "# of passed: " `echo $PASSED_CASES | wc -w`
prlog "# of failed: " `echo $FAILED_CASES | wc -w`
prlog "# of unresolved: " `echo $UNRESOLVED_CASES | wc -w`
prlog "# of untested: " `echo $UNTESTED_CASES | wc -w`
prlog "# of unsupported: " `echo $UNSUPPORTED_CASES | wc -w`
prlog "# of xfailed: " `echo $XFAILED_CASES | wc -w`
prlog "# of undefined(test bug): " `echo $UNDEFINED_CASES | wc -w`

# KTAP totals trailer in the standard "# Totals: ..." diagnostic form.
if [ "$KTAP" = "1" ]; then
  echo -n "# Totals:"
  echo -n " pass:"`echo $PASSED_CASES | wc -w`
  echo -n " fail:"`echo $FAILED_CASES | wc -w`
  echo -n " xfail:"`echo $XFAILED_CASES | wc -w`
  echo -n " xpass:0"
  echo -n " skip:"`echo $UNTESTED_CASES $UNSUPPORTED_CASES | wc -w`
  echo -n " error:"`echo $UNRESOLVED_CASES $UNDEFINED_CASES | wc -w`
  echo
fi

# Restore RT runtime and unmount anything we mounted.
cleanup

# if no error, return 0
exit $TOTAL_RESULT
525