From 42a62b219a21a76e2458cf07da06920ef61f3409 Mon Sep 17 00:00:00 2001
From: boris sigma <1322553126@qq.com>
Date: Mon, 7 Jul 2025 19:01:36 +0800
Subject: [PATCH] Fix: Skip batch size check for pixel_values in pipelining

---
 mindspeed_mm/tasks/inference/pipeline/parallel_wrapper.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mindspeed_mm/tasks/inference/pipeline/parallel_wrapper.py b/mindspeed_mm/tasks/inference/pipeline/parallel_wrapper.py
index 936a2a08..143e26ce 100644
--- a/mindspeed_mm/tasks/inference/pipeline/parallel_wrapper.py
+++ b/mindspeed_mm/tasks/inference/pipeline/parallel_wrapper.py
@@ -167,7 +167,8 @@ class ParallelWrapper:
         Ensure the first dimension of `model_forward_kwargs` is the batch size.
         """
-        first_dims = [v.shape[0] for k, v in model_forward_kwargs.items() if (k != "position_ids" and k != "cache_position" and v is not None)]
+        # first_dims = [v.shape[0] for k, v in model_forward_kwargs.items() if (k != "position_ids" and k != "cache_position" and v is not None)]
+        first_dims = [v.shape[0] for k, v in model_forward_kwargs.items() if (k != "position_ids" and k != "cache_position" and k != "pixel_values" and v is not None)]
         if "position_ids" in model_forward_kwargs.keys():
             first_dims.append(model_forward_kwargs["position_ids"].shape[1])
         if not len(set(first_dims)) == 1:
--
Gitee
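
For context, here is a minimal, self-contained sketch of the batch-size consistency check that this hunk relaxes. It is not the actual ParallelWrapper method: the tensor shapes, the check_batch_size name, and the ValueError are illustrative assumptions. The point of the patch is that in multimodal pipelines the first dimension of pixel_values is typically a patch or image-token count rather than the batch size, so including it makes an otherwise consistent batch look mismatched.

    # Simplified sketch of the check modified by the patch (hypothetical helper).
    import torch

    def check_batch_size(model_forward_kwargs: dict) -> int:
        """Return the common batch size, mirroring the patched filter."""
        skip_keys = {"position_ids", "cache_position", "pixel_values"}
        first_dims = [
            v.shape[0]
            for k, v in model_forward_kwargs.items()
            if k not in skip_keys and v is not None
        ]
        if "position_ids" in model_forward_kwargs:
            # The original check reads the batch size from dim 1 of position_ids,
            # implying a (..., batch, seq) layout.
            first_dims.append(model_forward_kwargs["position_ids"].shape[1])
        if len(set(first_dims)) != 1:
            # The hunk ends before the error handling; ValueError is a placeholder.
            raise ValueError(f"Inconsistent batch sizes found: {first_dims}")
        return first_dims[0]

    # Illustrative shapes only (assumed, not taken from the repository).
    kwargs = {
        "input_ids": torch.zeros(2, 128, dtype=torch.long),        # (batch, seq)
        "attention_mask": torch.ones(2, 128, dtype=torch.long),    # (batch, seq)
        "position_ids": torch.zeros(3, 2, 128, dtype=torch.long),  # (_, batch, seq)
        "pixel_values": torch.zeros(1024, 1176),                   # first dim is a patch count, not batch
    }
    print(check_batch_size(kwargs))  # 2 -- pixel_values no longer trips the check

With pixel_values left in the filter (the pre-patch behavior), the example above would report a mismatch between 2 and 1024 even though the batch size is consistent, which is the failure the patch avoids.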