From 197c5c0b849cfdb4f6844f90c49bb8adba85e1bb Mon Sep 17 00:00:00 2001
From: Wei Wang <109318740+weiwangmeta@users.noreply.github.com>
Date: Tue, 6 Dec 2022 06:25:04 -0800
Subject: [PATCH] Fix cuda/cpu check on NoneType (#88854) (#90068)

Summary: Fix cuda/cpu check on NoneType

Test Plan: sandcastle / github CI/CD

Differential Revision: D41203955

Pull Request resolved: https://github.com/pytorch/pytorch/pull/88854
Approved by: https://github.com/drisspg, https://github.com/ngimel

Co-authored-by: Michael Gschwind
---
 torch/nn/modules/activation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torch/nn/modules/activation.py b/torch/nn/modules/activation.py
index 60e2db7a53d3cc2..760b963e6c22861 100644
--- a/torch/nn/modules/activation.py
+++ b/torch/nn/modules/activation.py
@@ -1116,7 +1116,7 @@ def forward(self, query: Tensor, key: Tensor, value: Tensor, key_padding_mask: O
             # generator expressions.
             if torch.overrides.has_torch_function(tensor_args):
                 why_not_fast_path = "some Tensor argument has_torch_function"
-            elif not all([(x.is_cuda or 'cpu' in str(x.device)) for x in tensor_args]):
+            elif not all([(x is None or x.is_cuda or 'cpu' in str(x.device)) for x in tensor_args]):
                 why_not_fast_path = "some Tensor argument is neither CUDA nor CPU"
             elif torch.is_grad_enabled() and any([x is not None and x.requires_grad for x in tensor_args]):
                 why_not_fast_path = ("grad is enabled and at least one of query or the "
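
Note (not part of the patch): the hunk adds an `x is None` guard so that optional entries in tensor_args (for example a bias that is None) no longer break the fast-path device check by hitting `None.is_cuda`. Below is a minimal, hypothetical Python sketch of the fixed predicate; the helper name all_args_cuda_or_cpu is invented for illustration and does not appear in the patch.

import torch

# Hypothetical helper mirroring the fixed check: None entries are skipped
# instead of raising AttributeError when `.is_cuda` is accessed on them.
def all_args_cuda_or_cpu(tensor_args):
    return all([(x is None or x.is_cuda or 'cpu' in str(x.device)) for x in tensor_args])

query = torch.randn(2, 4, 8)
print(all_args_cuda_or_cpu((query, None)))  # True: the None entry no longer raises

The list comprehension (rather than a generator expression) is kept to match the surrounding code, which the hunk's context comment about generator expressions suggests is a TorchScript constraint.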