From ac4ef3a58895e05a194761cede66ded17e4e9217 Mon Sep 17 00:00:00 2001
From: 0xNaN
Date: Thu, 22 Oct 2020 00:56:43 +0200
Subject: [PATCH] removing docs since interface is now the same as pytorch's,
 LOC < 50 ;)

---
 tinygrad/gradcheck.py | 36 ------------------------------------
 1 file changed, 36 deletions(-)

diff --git a/tinygrad/gradcheck.py b/tinygrad/gradcheck.py
index 0c21933d45..1ec8ce9dbe 100644
--- a/tinygrad/gradcheck.py
+++ b/tinygrad/gradcheck.py
@@ -2,16 +2,6 @@ import numpy as np
 from tinygrad.tensor import Tensor
 
 def jacobian(func, input):
-  """
-  Compute the (analytical) Jacobian of func w.r.t. input.
-
-  func : A tinygrad func
-  input : An input
-
-  returns:
-  J : Jacobian
-  """
   output = func(input)
 
   ji = input.data.reshape(-1).shape[-1]
@@ -34,20 +24,6 @@ def mask_like(like, mask_inx, mask_value = 1.0):
   return mask.reshape(like.shape)
 
 def numerical_jacobian(func, input, eps = 1e-6):
-  """
-  Compute the Jacobian through Finite-Difference Approximation.
-  Somewhat inspired by [1] but not followed closely.
-
-  func : A tinygrad func
-  input : An input
-  eps : Perturbation step
-
-  returns:
-  NJ : an approx. of the Jacobian
-
-  [1]: https://timvieira.github.io/blog/post/2017/04/21/how-to-test-gradient-implementations/
-  """
   output = func(input)
 
   ji = input.data.reshape(-1).shape[-1]
@@ -67,18 +43,6 @@ def numerical_jacobian(func, input, eps = 1e-6):
   return NJ
 
 def gradcheck(func, input, eps = 1e-06, atol = 1e-5, rtol = 0.001):
-  """
-  Checks whether the numerical approx. of the Jacobian of func w.r.t input is close to the
-  analytical one.
-
-  func : A tinygrad func
-  input : An input
-  eps : Perturbation step
-  atol, rtol: Params for the numpy.allclose test
-
-  returns:
-  test_passed : Bool, whether the test passed
-  """
   NJ = numerical_jacobian(func, input, eps)
   J = jacobian(func, input)
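
With the docstrings removed, the module carries no usage documentation, so a
minimal usage sketch follows (the same call shape as torch.autograd.gradcheck,
which is what the subject line is pointing at). Only the names and signatures
jacobian(func, input), numerical_jacobian(func, input, eps) and
gradcheck(func, input, eps, atol, rtol) come from the diff; the model, the
weight tensor W and the shapes below are illustrative assumptions.

    import numpy as np
    from tinygrad.tensor import Tensor
    from tinygrad.gradcheck import jacobian, numerical_jacobian, gradcheck

    # Illustrative setup (not from the patch): a linear layer followed by
    # logsoftmax, checked w.r.t. the input x; W is a fixed weight tensor.
    W = Tensor(np.random.uniform(-1., 1., size=(10, 5)).astype(np.float32))
    x = Tensor(np.random.uniform(-1., 1., size=(1, 10)).astype(np.float32))

    def func(x):
      return x.dot(W).logsoftmax()

    J  = jacobian(func, x)            # analytical Jacobian of func at x
    NJ = numerical_jacobian(func, x)  # finite-difference approximation

    # gradcheck compares the two Jacobians with allclose-style tolerances
    # and, per the removed docstring, returns a bool
    assert gradcheck(func, x, eps=1e-6, atol=1e-5, rtol=1e-3)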
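
The removed numerical_jacobian docstring pointed at timvieira's post on
testing gradient implementations, and the hunks do not show the function
body. So, as a sketch of the standard technique rather than of tinygrad's
exact code: a central-difference estimate fills column i of the Jacobian by
perturbing input element i in both directions (the mask_like helper visible
in the hunk context appears to build exactly this kind of one-hot
perturbation).

    import numpy as np

    def central_difference_jacobian(f, x, eps=1e-6):
      # f maps a flat numpy array to a numpy array; returns an
      # (output_size, input_size) estimate whose column i is
      # (f(x + eps*e_i) - f(x - eps*e_i)) / (2*eps).
      x = np.asarray(x, dtype=np.float64).reshape(-1)
      out = np.asarray(f(x)).reshape(-1)
      NJ = np.zeros((out.size, x.size))
      for i in range(x.size):
        e = np.zeros_like(x)  # one-hot perturbation along element i
        e[i] = eps
        NJ[:, i] = (np.asarray(f(x + e)).reshape(-1) -
                    np.asarray(f(x - e)).reshape(-1)) / (2 * eps)
      return NJ

A central difference is accurate to O(eps^2) per element, versus O(eps) for
a one-sided difference, so it gives the allclose test in gradcheck more
headroom at the same perturbation step.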