Skip to content

Commit 379420d

Browse files
committed
test(attention): turned global manual seed into fixture
1 parent ab4c79a commit 379420d

1 file changed

Lines changed: 6 additions & 1 deletion

File tree

tests/models/test_causal_self_attention.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,12 @@
1818
flash_attn_varlen_func,
1919
)
2020

21-
torch.manual_seed(0) # FIXME remove or do within tests?
21+
22+
@pytest.fixture(autouse=True)
def _set_torch_seed_per_test():
    """Autouse fixture: pin the torch RNG seed before every test.

    Replaces the old module-level ``torch.manual_seed(0)`` so each test
    starts from the same deterministic RNG state, and also reseeds every
    CUDA device when one is available.
    """
    seed = 0
    torch.manual_seed(seed)
    if not torch.cuda.is_available():
        return
    torch.cuda.manual_seed_all(seed)
2227

2328

2429
@pytest.mark.skipif(torch.cuda.device_count() < 1, reason="This test requires 1 GPU.")

0 commit comments

Comments
 (0)