Use nvmetcli to setup/cleanup a remote soft target.

Signed-off-by: Daniel Wagner <dwagner@xxxxxxx>
---
 contrib/nvme_target_control.py | 110 +++++++++++++++++++++++++++++++++
 contrib/nvmet-subsys.jinja2    |  71 +++++++++++++++++++++
 2 files changed, 181 insertions(+)
 create mode 100755 contrib/nvme_target_control.py
 create mode 100644 contrib/nvmet-subsys.jinja2

diff --git a/contrib/nvme_target_control.py b/contrib/nvme_target_control.py
new file mode 100755
index 000000000000..97ed1c600dd2
--- /dev/null
+++ b/contrib/nvme_target_control.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-3.0+
+
+# blktests calls this script to set up/tear down remote targets. blktests passes
+# all relevant information via the command line, e.g. --hostnqn. The interface
+# between blktests and this script is documented in the build_parser
+# function below.
+#
+# This script uses nvmetcli to set up the remote target (it depends on the REST
+# API feature [1]). There is no technical need to use nvmetcli, but it makes
+# it simple to set up a remote Linux box. If you want to set up something else,
+# you should replace this part.
+#
+# There are a couple of global configuration options which need to be set.
+# Add a ~/.config/blktests/nvme_target_control.toml file with something like:
+#
+#   [main]
+#   nvmetcli='/usr/bin/nvmetcli'
+#   remote='http://nvmet.local:5000'
+#
+# and then start the nvmetcli server on the remote host.
+#
+# nvmetcli uses a JSON configuration, thus this script creates a JSON
+# configuration using a jinja2 template. After this step we simply have to
+# set the blktests variables correctly and start blktests:
+#
+#   $ host_ip4=$(/sbin/ip -o -4 addr list eth0 | awk '{print $4}' | cut -d/ -f1)
+#   $ NVME_TRTYPE=tcp NVME_NVMET=nvmet.local NVME_HOST_TRADDR=${host_ip4} \
+#     NVME_TARGET_CONTROL=~/blktests/contrib/nvme_target_control.py ./check nvme
+#
+# [1] https://github.com/hreinecke/nvmetcli/tree/restapi
+
+import os
+import tomllib
+import argparse
+import subprocess
+from jinja2 import Environment, FileSystemLoader
+
+
+XDG_CONFIG_HOME = os.environ.get("XDG_CONFIG_HOME")
+if not XDG_CONFIG_HOME:
+    XDG_CONFIG_HOME = os.environ.get('HOME') + '/.config'
+
+
+with open(f'{XDG_CONFIG_HOME}/blktests/nvme_target_control.toml', 'rb') as f:
+    config = tomllib.load(f)
+    nvmetcli = config['main']['nvmetcli']
+    remote = config['main']['remote']
+
+
+def gen_conf(conf):
+    environment = Environment(loader=FileSystemLoader('.'))
+    template = environment.get_template('nvmet-subsys.jinja2')
+    filename = f'{conf["subsysnqn"]}.json'
+    content = template.render(conf)
+    with open(filename, mode='w', encoding='utf-8') as outfile:
+        outfile.write(content)
+
+
+def target_setup(args):
+    conf = {
+        'subsysnqn': args.subsysnqn,
+        'subsys_uuid': args.subsys_uuid,
+        'hostnqn': args.hostnqn,
+        'allowed_hosts': args.hostnqn,
+        'ctrlkey': args.ctrlkey,
+        'hostkey': args.hostkey,
+        'blkdev': '/dev/vdc'
+    }
+
+    gen_conf(conf)
+
+    subprocess.call(['python3', nvmetcli, '--remote=' + remote,
+                     'restore', args.subsysnqn + '.json'])
+
+
+def target_cleanup(args):
+    subprocess.call(['python3', nvmetcli, '--remote=' + remote,
+                     'clear', args.subsysnqn + '.json'])
+
+
+def build_parser():
+    parser = argparse.ArgumentParser()
+    sub = parser.add_subparsers(required=True)
+
+    setup = sub.add_parser('setup')
+    setup.add_argument('--subsysnqn', required=True)
+    setup.add_argument('--subsys-uuid', required=True)
+    setup.add_argument('--hostnqn', required=True)
+    setup.add_argument('--ctrlkey', default='')
+    setup.add_argument('--hostkey', default='')
+    setup.set_defaults(func=target_setup)
+
+    cleanup = sub.add_parser('cleanup')
+    cleanup.add_argument('--subsysnqn', required=True)
+    cleanup.set_defaults(func=target_cleanup)
+
+    return parser
+
+
+def main():
+    import sys
+
+    parser = build_parser()
+    args = parser.parse_args()
+    args.func(args)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/contrib/nvmet-subsys.jinja2 b/contrib/nvmet-subsys.jinja2
new file mode 100644
index 000000000000..a446fbd9b784
--- /dev/null
+++ b/contrib/nvmet-subsys.jinja2
@@ -0,0 +1,71 @@
+{
+  "hosts": [
+    {
+      "nqn": "{{ hostnqn }}"
+    }
+  ],
+  "ports": [
+    {
+      "addr": {
+        "adrfam": "ipv4",
+        "traddr": "0.0.0.0",
+        "treq": "not specified",
+        "trsvcid": "4420",
+        "trtype": "tcp",
+        "tsas": "none"
+      },
+      "ana_groups": [
+        {
+          "ana": {
+            "state": "optimized"
+          },
+          "grpid": 1
+        }
+      ],
+      "param": {
+        "inline_data_size": "16384",
+        "pi_enable": "0"
+      },
+      "portid": 0,
+      "referrals": [],
+      "subsystems": [
+        "{{ subsysnqn }}"
+      ]
+    }
+  ],
+  "subsystems": [
+    {
+      "allowed_hosts": [
+        "{{ allowed_hosts }}"
+      ],
+      "attr": {
+        "allow_any_host": "0",
+        "cntlid_max": "65519",
+        "cntlid_min": "1",
+        "firmware": "yada",
+        "ieee_oui": "0x000000",
+        "model": "Linux",
+        "pi_enable": "0",
+        "qid_max": "128",
+        "serial": "0c74361069d9db6c65ef",
+        "version": "1.3"
+      },
+      "namespaces": [
+        {
+          "ana": {
+            "grpid": "1"
+          },
+          "ana_grpid": 1,
+          "device": {
+            "nguid": "00000000-0000-0000-0000-000000000000",
+            "path": "{{ blkdev }}",
+            "uuid": "{{ subsys_uuid }}"
+          },
+          "enable": 1,
+          "nsid": 1
+        }
+      ],
+      "nqn": "{{ subsysnqn }}"
+    }
+  ]
+}
--
2.45.2
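
Usage sketch (not part of the patch): blktests invokes the script with the values it
generates for the test run; the NQN and UUID values below are made-up placeholders,
only meant to show the shape of the calls that build_parser accepts:

  $ ./nvme_target_control.py setup \
        --subsysnqn blktests-subsystem-1 \
        --subsys-uuid 91fdba0d-f87b-4c25-b80f-db7be1418b9e \
        --hostnqn nqn.2014-08.org.nvmexpress:uuid:242d4a24-2484-4a80-8234-d0169409c5e8
  $ ./nvme_target_control.py cleanup --subsysnqn blktests-subsystem-1

'setup' renders <subsysnqn>.json from nvmet-subsys.jinja2 and passes it to
'nvmetcli --remote=... restore'; 'cleanup' hands the same file name to
'nvmetcli --remote=... clear'. Since gen_conf() loads the template via
FileSystemLoader('.') and writes the JSON into the current directory, the script
has to run from the directory containing nvmet-subsys.jinja2.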