From 1b1aa8fda97edaca5f5909d74e4fb1960ed5d761 Mon Sep 17 00:00:00 2001
From: Mikayla Gawarecki
Date: Wed, 23 Jul 2025 07:48:19 -0700
Subject: [PATCH] Bump tolerances for per_sample_grads tutorial

---
 intermediate_source/per_sample_grads.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/intermediate_source/per_sample_grads.py b/intermediate_source/per_sample_grads.py
index ece80d3f94f..a5ece9303fc 100644
--- a/intermediate_source/per_sample_grads.py
+++ b/intermediate_source/per_sample_grads.py
@@ -169,7 +169,7 @@ def compute_loss(params, buffers, sample, target):
 #    results of hand processing each one individually:
 
 for per_sample_grad, ft_per_sample_grad in zip(per_sample_grads, ft_per_sample_grads.values()):
-    assert torch.allclose(per_sample_grad, ft_per_sample_grad, atol=3e-3, rtol=1e-5)
+    assert torch.allclose(per_sample_grad, ft_per_sample_grad, atol=1.2e-1, rtol=1e-5)
 
 ######################################################################
 # A quick note: there are limitations around what types of functions can be