author     Dmitry Baranchuk <dmitrybaranchuk@gmail.com>  2022-09-10 19:33:21 -0700
committer  GitHub <noreply@github.com>                   2022-09-10 19:33:21 -0700
commit     843ad0631c65eabc7f64e80906ecf5482cc1a036 (patch)
tree       07ab541ec59ab3474a711c155daa118fc0ae6864 /tests/test_autograd.py
parent     8d34d36f150b0fd4914cdb56d4e3bda34c029ccc (diff)
parent     2e630b55f51d454f3bd723dffda68a07ef93190c (diff)
Merge pull request #1 from TimDettmers/main
Update main branch
Diffstat (limited to 'tests/test_autograd.py')
-rw-r--r--   tests/test_autograd.py | 2 ++
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/tests/test_autograd.py b/tests/test_autograd.py
index 0cd17c9..bae26de 100644
--- a/tests/test_autograd.py
+++ b/tests/test_autograd.py
@@ -40,6 +40,7 @@ names = [
     ids=names,
 )
 def test_matmul(dim1, dim2, dim3, dim4, funcs, dtype, req_grad, transpose):
+    if not torch.cuda.is_available(): pytest.skip('No GPU found.')
     if dim2 > 0:
         dim2 = dim2 - (dim2 % 16)
     dim3 = dim3 - (dim3 % 16)
@@ -306,6 +307,7 @@ def test_matmullt(
     has_fp16_weights,
     has_bias
 ):
+    if not torch.cuda.is_available(): pytest.skip('No GPU found.')
     dimA = (dim2, dim3) if not transpose[0] else (dim3, dim2)
     dimB = (dim3, dim4) if not transpose[1] else (dim4, dim3)
     outlier_dim = torch.randint(0, dimA[1], size=(dimA[1] // 8,), device="cuda")
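Note: the commit guards each test with an inline `pytest.skip` when CUDA is unavailable. A minimal sketch of an alternative idiom (not what this commit does) is a reusable `pytest.mark.skipif` marker applied to GPU-only tests; the test name below is hypothetical and only for illustration.

import pytest
import torch

# Hypothetical reusable marker: skips the decorated test when no CUDA device
# is present, instead of an inline guard at the top of each test body.
requires_cuda = pytest.mark.skipif(
    not torch.cuda.is_available(), reason="No GPU found."
)

@requires_cuda
def test_matmul_gpu_only():
    # Runs only when a CUDA device is available.
    a = torch.randn(16, 16, device="cuda")
    b = torch.randn(16, 16, device="cuda")
    assert torch.matmul(a, b).shape == (16, 16)

Either approach skips rather than fails on CPU-only machines; the inline guard used in this commit keeps the change local to the two affected tests.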