
Commit ed6ae1e

[AITER] [ROCm] Fix crash when loading llama4 model with an old aiter version installed; fall back to the forward_native implementation (vllm-project#29124)
Signed-off-by: Xiao Li <ilx@meta.com>
1 parent 9875be6 · commit ed6ae1e

File tree

1 file changed: +13 -6 lines changed

vllm/v1/sample/ops/topk_topp_sampler.py

Lines changed: 13 additions & 6 deletions
@@ -60,13 +60,20 @@ def __init__(self, logprobs_mode: LogprobsMode = "raw_logprobs") -> None:
             logprobs_mode not in ("processed_logits", "processed_logprobs")
             and rocm_aiter_ops.is_enabled()
         ):
-            import aiter.ops.sampling  # noqa: F401
+            try:
+                import aiter.ops.sampling  # noqa: F401
 
-            self.aiter_ops = torch.ops.aiter
-            logger.info_once(
-                "Using aiter sampler on ROCm (lazy import, sampling-only)."
-            )
-            self.forward = self.forward_hip
+                self.aiter_ops = torch.ops.aiter
+                logger.info_once(
+                    "Using aiter sampler on ROCm (lazy import, sampling-only)."
+                )
+                self.forward = self.forward_hip
+            except ImportError:
+                logger.warning_once(
+                    "aiter.ops.sampling is not available on ROCm. "
+                    "Falling back to forward_native implementation."
+                )
+                self.forward = self.forward_native
         else:
             self.forward = self.forward_native
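For illustration, below is a minimal, self-contained sketch of the optional-import fallback pattern this commit applies. The class name, method bodies, and plain logging calls are assumptions made to keep the example runnable on its own; the real sampler lives in vllm/v1/sample/ops/topk_topp_sampler.py, uses vLLM's logger.info_once / logger.warning_once helpers, and dispatches to torch.ops.aiter kernels.

# Standalone sketch (not the vLLM code): guard an optional extension import
# with try/except so that an old aiter build without the sampling ops
# degrades to the native path instead of crashing at model load time.
import importlib
import logging

logger = logging.getLogger(__name__)


class TopKTopPSamplerSketch:
    """Hypothetical stand-in for the real sampler class."""

    def __init__(self) -> None:
        try:
            # Older aiter releases do not ship aiter.ops.sampling, so this
            # import can raise ImportError even when aiter itself is present.
            importlib.import_module("aiter.ops.sampling")
        except ImportError:
            logger.warning(
                "aiter.ops.sampling is not available; falling back to the "
                "native sampling implementation."
            )
            self.forward = self.forward_native
        else:
            logger.info("Using aiter sampler (sampling-only ops).")
            self.forward = self.forward_hip

    def forward_hip(self, logits):
        # The real code would call torch.ops.aiter kernels here.
        return logits

    def forward_native(self, logits):
        # Pure-PyTorch fallback path.
        return logits

The design choice mirrored here is that the fallback is decided once at construction time by rebinding self.forward, so the hot sampling path pays no per-call availability check.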