From 1459c0c0f5570ac07078483ff9ca7c5d6e764c7c Mon Sep 17 00:00:00 2001
From: Michael Gschwind
Date: Thu, 10 Nov 2022 19:05:53 -0800
Subject: [PATCH] Fix cuda/cpu check on NoneType (#88854)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/88854

Fix cuda/cpu check on NoneType

Test Plan: sandcastle / github CI/CD

Differential Revision: D41203955

fbshipit-source-id: f472161a5e3389d03035c6461f7cee102018d5fe
---
 torch/nn/modules/activation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/torch/nn/modules/activation.py b/torch/nn/modules/activation.py
index 5f5615b496d7d05..2960d793e054818 100644
--- a/torch/nn/modules/activation.py
+++ b/torch/nn/modules/activation.py
@@ -1114,7 +1114,7 @@ def forward(self, query: Tensor, key: Tensor, value: Tensor, key_padding_mask: O
         # generator expressions.
         if torch.overrides.has_torch_function(tensor_args):
             why_not_fast_path = "some Tensor argument has_torch_function"
-        elif not all([(x.is_cuda or 'cpu' in str(x.device)) for x in tensor_args]):
+        elif not all([(x is None or x.is_cuda or 'cpu' in str(x.device)) for x in tensor_args]):
             why_not_fast_path = "some Tensor argument is neither CUDA nor CPU"
         elif torch.is_grad_enabled() and any([x.requires_grad for x in tensor_args]):
             why_not_fast_path = ("grad is enabled and at least one of query or the "
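
For context, below is a minimal, standalone sketch (not part of the patch) of the failure mode the one-line change guards against. It assumes, as the diff implies, that tensor_args can contain None entries (for example an optional bias or mask left unset by the caller); the variable names here are illustrative only.

import torch

query = torch.randn(2, 4, 8)
attn_mask = None  # illustrative optional argument left unset

tensor_args = (query, attn_mask)

# Old check: evaluating x.is_cuda on a None entry raises AttributeError.
try:
    ok = all(x.is_cuda or 'cpu' in str(x.device) for x in tensor_args)
except AttributeError as e:
    print("old check fails:", e)

# Patched check: None entries short-circuit before any attribute access,
# so only real tensors are tested for CUDA/CPU placement.
ok = all(x is None or x.is_cuda or 'cpu' in str(x.device) for x in tensor_args)
print("new check passes:", ok)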