author    David S. Miller <davem@davemloft.net>  2024-04-08 11:40:41 +0100
committer David S. Miller <davem@davemloft.net>  2024-04-08 11:40:41 +0100
commit    a15d80a16dbf697f8953e118eb5099450db2b83c (patch)
tree      f14ba4546dfd3423aad6315ec710ee8b926ae852 /tools/testing/selftests/net/lib/py/ksft.py
parent    d7d6e47016bc536b3443fc16ee694743bfb109c7 (diff)
parent    f0e6c86e4bab228ca51b863af180ade0a970a393 (diff)
Merge branch 'ynl-tests'
Jakub Kicinski says:

====================
selftests: net: groundwork for YNL-based tests

Currently the options for writing networking tests are C, bash or some
mix of the two. YAML/Netlink gives us the ability to easily interface
with Netlink in higher level languages. In particular, there is a Python
library already available in tree, under tools/net.

Add the scaffolding which allows writing tests using this library.
The "scaffolding" is needed because the library lives under tools/net
and uses YAML files from under Documentation/. So we need a small amount
of glue code to find those things and add them to TEST_FILES.

This series adds both a basic SW sanity test and a driver test which can
be run against netdevsim or a real device. When I develop core code
I usually test with netdevsim, then a real device, and then a backport
to Meta's kernel. Because of the lack of integration, until now I had
to throw away the (YNL-based) test script and netdevsim code.

Running tests in tree directly:

  $ ./tools/testing/selftests/net/nl_netdev.py
  KTAP version 1
  1..2
  ok 1 nl_netdev.empty_check
  ok 2 nl_netdev.lo_check
  # Totals: pass:2 fail:0 xfail:0 xpass:0 skip:0 error:0

in tree via make:

  $ make -C tools/testing/selftests/ TARGETS=net \
        TEST_PROGS=nl_netdev.py TEST_GEN_PROGS="" run_tests
  [ ... ]

and installed externally, all seem to work:

  $ make -C tools/testing/selftests/ TARGETS=net \
        install INSTALL_PATH=/tmp/ksft-net
  $ /tmp/ksft-net/run_kselftest.sh -t net:nl_netdev.py
  [ ... ]

For driver tests I followed the lead of net/forwarding and get the
device name from env and/or a config file.

v3:
 - fix up netdevsim C
 - various small nits in other patches (see changelog in patches)
v2: https://lore.kernel.org/all/20240403023426.1762996-1-kuba@kernel.org/
 - don't add to TARGETS, create a separate variable with deps
 - support and use with
 - support and use passing arguments to tests
v1: https://lore.kernel.org/all/20240402010520.1209517-1-kuba@kernel.org/
====================

Signed-off-by: David S. Miller <davem@davemloft.net>
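[Editor's note] As the cover letter says, driver tests follow net/forwarding and take the device name from the environment and/or a config file. The sketch below only illustrates that pattern; the NETIF variable name, the net.config filename and the helper itself are assumptions for illustration, not code from this series.

  import os

  def dev_from_env(config_file="net.config"):
      """Return the interface under test: read KEY=VALUE pairs from an
      optional config file, then let a real environment variable win."""
      cfg = {}
      if os.path.isfile(config_file):
          with open(config_file) as fp:
              for line in fp:
                  line = line.strip()
                  if line and not line.startswith("#"):
                      key, _, val = line.partition("=")
                      cfg[key.strip()] = val.strip()
      # Environment variable (hypothetical name NETIF) overrides the file.
      return os.environ.get("NETIF", cfg.get("NETIF"))

  if __name__ == "__main__":
      print("device under test:", dev_from_env())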
Diffstat (limited to 'tools/testing/selftests/net/lib/py/ksft.py')
-rw-r--r--  tools/testing/selftests/net/lib/py/ksft.py | 96
1 file changed, 96 insertions(+), 0 deletions(-)
diff --git a/tools/testing/selftests/net/lib/py/ksft.py b/tools/testing/selftests/net/lib/py/ksft.py
new file mode 100644
index 000000000000..c7210525981c
--- /dev/null
+++ b/tools/testing/selftests/net/lib/py/ksft.py
@@ -0,0 +1,96 @@
+# SPDX-License-Identifier: GPL-2.0
+
+import builtins
+from .consts import KSFT_MAIN_NAME
+
+KSFT_RESULT = None
+
+
+class KsftSkipEx(Exception):
+ pass
+
+
+class KsftXfailEx(Exception):
+ pass
+
+
+def ksft_pr(*objs, **kwargs):
+ print("#", *objs, **kwargs)
+
+
+def ksft_eq(a, b, comment=""):
+ global KSFT_RESULT
+ if a != b:
+ KSFT_RESULT = False
+ ksft_pr("Check failed", a, "!=", b, comment)
+
+
+def ksft_true(a, comment=""):
+ global KSFT_RESULT
+ if not a:
+ KSFT_RESULT = False
+ ksft_pr("Check failed", a, "does not eval to True", comment)
+
+
+def ksft_in(a, b, comment=""):
+ global KSFT_RESULT
+ if a not in b:
+ KSFT_RESULT = False
+ ksft_pr("Check failed", a, "not in", b, comment)
+
+
+def ksft_ge(a, b, comment=""):
+ global KSFT_RESULT
+ if a < b:
+ KSFT_RESULT = False
+ ksft_pr("Check failed", a, "<", b, comment)
+
+
+def ktap_result(ok, cnt=1, case="", comment=""):
+ res = ""
+ if not ok:
+ res += "not "
+ res += "ok "
+ res += str(cnt) + " "
+ res += KSFT_MAIN_NAME
+ if case:
+ res += "." + str(case.__name__)
+ if comment:
+ res += " # " + comment
+ print(res)
+
+
+def ksft_run(cases, args=()):
+ totals = {"pass": 0, "fail": 0, "skip": 0, "xfail": 0}
+
+ print("KTAP version 1")
+ print("1.." + str(len(cases)))
+
+ global KSFT_RESULT
+ cnt = 0
+ for case in cases:
+ KSFT_RESULT = True
+ cnt += 1
+ try:
+ case(*args)
+ except KsftSkipEx as e:
+ ktap_result(True, cnt, case, comment="SKIP " + str(e))
+ totals['skip'] += 1
+ continue
+ except KsftXfailEx as e:
+ ktap_result(True, cnt, case, comment="XFAIL " + str(e))
+ totals['xfail'] += 1
+ continue
+ except Exception as e:
+ for line in str(e).split('\n'):
+ ksft_pr("Exception|", line)
+ ktap_result(False, cnt, case)
+ totals['fail'] += 1
+ continue
+
+ ktap_result(KSFT_RESULT, cnt, case)
+ totals['pass'] += 1
+
+ print(
+ f"# Totals: pass:{totals['pass']} fail:{totals['fail']} xfail:{totals['xfail']} xpass:0 skip:{totals['skip']} error:0"
+ )