diff options
author | Tim Dettmers <tim.dettmers@gmail.com> | 2022-08-23 13:59:34 -0700 |
---|---|---|
committer | Tim Dettmers <tim.dettmers@gmail.com> | 2022-08-23 13:59:34 -0700 |
commit | 7e0fb655e1e040221054886fbee9d5682aa6e4e2 (patch) | |
tree | ced850e90aef61e3a74a07c1933924b746cdcda6 /tests/test_autograd.py | |
parent | 9d60b3c5279641ba936facd710c722ebe52fcf40 (diff) |
Some initial code. Needs to be tested.
Diffstat (limited to 'tests/test_autograd.py')
-rw-r--r-- | tests/test_autograd.py | 2 |
1 file changed, 2 insertions, 0 deletions
diff --git a/tests/test_autograd.py b/tests/test_autograd.py index 0cd17c9..bae26de 100644 --- a/tests/test_autograd.py +++ b/tests/test_autograd.py @@ -40,6 +40,7 @@ names = [ ids=names, ) def test_matmul(dim1, dim2, dim3, dim4, funcs, dtype, req_grad, transpose): + if not torch.cuda.is_available(): pytest.skip('No GPU found.') if dim2 > 0: dim2 = dim2 - (dim2 % 16) dim3 = dim3 - (dim3 % 16) @@ -306,6 +307,7 @@ def test_matmullt( has_fp16_weights, has_bias ): + if not torch.cuda.is_available(): pytest.skip('No GPU found.') dimA = (dim2, dim3) if not transpose[0] else (dim3, dim2) dimB = (dim3, dim4) if not transpose[1] else (dim4, dim3) outlier_dim = torch.randint(0, dimA[1], size=(dimA[1] // 8,), device="cuda") |