diff --git a/mindspore-lite/test/st/python/python_api/conftest.py b/mindspore-lite/test/st/python/python_api/conftest.py index 4a942d21e922601a90aee0c04c1d1b32b37c8885..601d20b9f71cae6792ac421fc3dd373a01fbe486 100644 --- a/mindspore-lite/test/st/python/python_api/conftest.py +++ b/mindspore-lite/test/st/python/python_api/conftest.py @@ -40,6 +40,33 @@ def pytest_addoption(parser): help="Available device ids for test, default is [0]. Example: --device_id 0 1", ) + parser.addoption( + "--mindir_dir", + action="store", + default="", + help="path of mindir", + ) + + parser.addoption( + "--output_dir", + action="store", + default="", + help="convert output dir", + ) + + parser.addoption( + "--config_dir", + action="store", + default="", + help="convert config dir", + ) + + parser.addoption( + "--so_path", + action="store", + default="", + help="path of mindspore_lite tools", + ) @pytest.fixture def device_id(request): @@ -48,6 +75,33 @@ def device_id(request): """ return list(set(request.config.getoption("device_id"))) +@pytest.fixture +def so_path(request): + """ + so_path fixture + """ + return request.config.getoption("so_path") + +@pytest.fixture +def mindir_dir(request): + """ + mindir_dir fixture + """ + return request.config.getoption("mindir_dir") + +@pytest.fixture +def output_dir(request): + """ + output_dir fixture + """ + return request.config.getoption("output_dir") + +@pytest.fixture +def config_dir(request): + """ + config_dir fixture + """ + return request.config.getoption("config_dir") def _parse_backend_mark(item, device_id_option): """ diff --git a/mindspore-lite/test/st/python/python_api/test_model_split.py b/mindspore-lite/test/st/python/python_api/test_model_split.py new file mode 100644 index 0000000000000000000000000000000000000000..0624967370604338782d9d9cc7c4fd389718df60 --- /dev/null +++ b/mindspore-lite/test/st/python/python_api/test_model_split.py @@ -0,0 +1,68 @@ +# Copyright 2025 Huawei Technologies Co., Ltd +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +""" +Test for MindSpore graph split +""" + +import os +import mindspore_lite as mslite +import numpy as np + + +def test_graph_split_01_convert(mindir_dir, so_path, output_dir, config_dir): + ''' + test model cache convert. + ''' + fmk_type = "ONNX" + config_file = os.path.join(config_dir, 'graph_split.config') + model_path = os.path.join(mindir_dir, "02-seg_3.onnx") + output_path = output_dir + "graph_split" + cmd_string = so_path + "/tools/converter/converter/converter_lite " + \ + " --modelFile=" + model_path + \ + " --optimize=ascend_oriented " + \ + " --outputFile=" + output_path + \ + " --fmk=" + fmk_type + \ + " --configFile=" + config_file + \ + " --inputShape=input:1,289,289,3" + ret = os.system(cmd_string) + if ret != 0: + raise RuntimeError("model convert failed, cmd_string is: ", cmd_string) + +def test_graph_split_02_build(output_dir): + ''' + test model cache build and inference. 
+    '''
+    dtype_map = {
+        mslite.DataType.FLOAT32: np.float32,
+        mslite.DataType.INT32: np.int32,
+        mslite.DataType.FLOAT16: np.float16,
+        mslite.DataType.INT8: np.int8
+    }
+    context = mslite.Context()
+    context.target = ["ascend"]
+    context.ascend.device_id = 0
+    try:
+        runner = mslite.MultiModelRunner()
+        model_path = output_dir + 'graph_split.mindir'
+        runner.build_from_file(model_path, mslite.ModelType.MINDIR, context)
+        execs = runner.get_model_executor()
+        for exec_ in execs:
+            exec_inputs = exec_.get_inputs()
+            for input_ in exec_inputs:
+                data = np.random.randn(*input_.shape).astype(dtype_map[input_.dtype])
+                input_.set_data_from_numpy(data)
+            exec_.predict(exec_inputs)
+    except Exception as excep:
+        raise RuntimeError('run graph split model failed!') from excep
diff --git a/mindspore-lite/test/st/python/test_inference_cloud_nocofig.py b/mindspore-lite/test/st/python/test_inference_cloud_nocofig.py
index f94a2dc7dc40dbc89841d87b32b76041cc596af7..9bccf9e9d1cb55bfc3eb2bb1e91fe6e863d74440 100644
--- a/mindspore-lite/test/st/python/test_inference_cloud_nocofig.py
+++ b/mindspore-lite/test/st/python/test_inference_cloud_nocofig.py
@@ -26,6 +26,9 @@ error_happened = []
 
 
 def lite_test(func):
+    '''
+    wrapper of test lite
+    '''
     @wraps(func)
     def wrap_test(*args, **kwargs):
         try:
@@ -36,7 +39,9 @@ def lite_test(func):
             traceback.print_exc()
             print(f"[FAILED] {str(func.__name__)}")
             global error_happened
-            error_happened.append(str(func.__name__))
+            error_happened_local = error_happened
+            error_happened_local.append(str(func.__name__))
+            error_happened = error_happened_local
 
     return wrap_test
 
@@ -46,12 +51,15 @@ def handle_error():
         print(f"test_inference_cloud_nocofig.py: run testcases failed: {error_happened}")
         sys.exit(1)
     else:
-        print(f"test_inference_cloud_nocofig.py: run testcases success")
+        print("test_inference_cloud_nocofig.py: run testcases success")
 
 
 # ============================ ascend inference ============================
 @lite_test
 def 
test_model_group_inference_ascend(mindir_dir): + ''' + test model group + ''' context = mslite.Context() context.target = ["ascend"] context.ascend.device_id = 0 @@ -67,7 +75,7 @@ def test_model_group_inference_ascend(mindir_dir): model0.build_from_file(model_path0, mslite.ModelType.MINDIR, context) model1.build_from_file(model_path1, mslite.ModelType.MINDIR, context) - for i in range(2): + for _ in range(2): inputs = [mslite.Tensor(np.ones((4, 4), np.float32))] outputs = model0.predict(inputs) assert (outputs[0].get_data_to_numpy() == np.ones((4, 4), np.float32)).all() @@ -78,6 +86,9 @@ def test_model_group_inference_ascend(mindir_dir): @lite_test def test_model_invalid_dynamic_dims_error_ascend(mindir_dir): + ''' + test invalid dynamic dims + ''' context = mslite.Context() context.target = ["ascend"] context.ascend.device_id = 0 @@ -93,47 +104,6 @@ def test_model_invalid_dynamic_dims_error_ascend(mindir_dir): except RuntimeError as ex: assert "build_from_file failed" in str(ex) -@lite_test -def test_graph_split_ascend(mindir_dir): - dtype_map = { - mslite.DataType.FLOAT32: np.float32, - mslite.DataType.INT32: np.int32, - mslite.DataType.FLOAT16: np.float16, - mslite.DataType.INT8: np.int8 - } - - converter = mslite.converter.Converter() - converter.save_type = mslite.ModelType.MINDIR - converter.optimize = "ascend_oriented" - converter.input_shape = {"input": [1, 289, 289, 3]} - config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), - '../graph_split.config') - print("config path:", config_path) - try: - converter.convert(mslite.FmkType.ONNX, os.path.join(mindir_dir, "02-seg_3.onnx"), - os.path.join(os.path.dirname(os.path.abspath(__file__)), - '../ms_models/graph_split'), config_file=config_path) - except: - raise RuntimeError('convert graph split model failed!') - - context = mslite.Context() - context.target = ["ascend"] - context.ascend.devcie_id = 0 - try: - runner = mslite.MultiModelRunner() - model_path = 
os.path.join(os.path.dirname(os.path.abspath(__file__)), - '../ms_models/graph_split.mindir') - runner.build_from_file(model_path, mslite.ModelType.MINDIR, context) - execs = runner.get_model_executor() - for exec_ in execs: - exec_inputs = exec_.get_inputs() - for input_ in exec_inputs: - data = np.random.randn(*input_.shape).astype(dtype_map[input_.dtype]) - input_.set_data_from_numpy(data) - exec_.predict(exec_inputs) - except: - raise RuntimeError('run graph split model failed!') - if __name__ == '__main__': print("test_inference_cloud_nocofig.py: begin run testcases.") model_dir = sys.argv[1] diff --git a/mindspore-lite/test/st/scripts/ascend/run_cloud_arm_a2.sh b/mindspore-lite/test/st/scripts/ascend/run_cloud_arm_a2.sh index b9903ab383289215df5a559ca794c4f5409345b7..359fc2e1bf0fa074dd6c91c9473f3d198329dd7a 100644 --- a/mindspore-lite/test/st/scripts/ascend/run_cloud_arm_a2.sh +++ b/mindspore-lite/test/st/scripts/ascend/run_cloud_arm_a2.sh @@ -401,6 +401,7 @@ if [[ "${MSLITE_ENABLE_COVERAGE}" == "on" || "${MSLITE_ENABLE_COVERAGE}" == "ON" python3 -m coverage run --rcfile=${MSLITE_COVERAGE_FILE} -m pytest test_acl_profiling.py || exit 1 python3 -m coverage run --rcfile=${MSLITE_COVERAGE_FILE} -m pytest test_encrypt_and_decrypt.py || exit 1 python3 -m coverage run --rcfile=${MSLITE_COVERAGE_FILE} -m pytest test_stream_sync_timeout.py -c pytest.ini --device_id ${device_id} || exit 1 + python3 -m coverage run --rcfile=${MSLITE_COVERAGE_FILE} -m pytest test_model_split.py --mindir_dir=${models_path}/ --so_path=${benchmark_test_path}/mindspore-lite-${version}-linux-${arch}/ --output_dir=${ms_models_path}/ --config_dir=${basepath}/../${config_folder}/ascend/ || exit 1 else pytest test_tensor.py || exit 1 pytest test_model.py || exit 1 @@ -410,6 +411,7 @@ else pytest test_acl_profiling.py || exit 1 pytest test_encrypt_and_decrypt.py || exit 1 pytest test_stream_sync_timeout.py -c pytest.ini --device_id ${device_id} || exit 1 + pytest test_model_split.py 
--mindir_dir=${models_path}/ --so_path=${benchmark_test_path}/mindspore-lite-${version}-linux-${arch}/ --output_dir=${ms_models_path}/ --config_dir=${basepath}/../${config_folder}/ascend/ || exit 1 fi echo "---------- Run MindSpore Lite API SUCCESS ----------" #---------------------------------------------------------