
Statistics test: check if distributions match torch (#769)

* Check if tensor values match torch

* Clean up randomness tests and remove dependency

* Remove kaiming uniform test
Jacky Lee 2023-05-07 21:43:23 -07:00 committed by GitHub
parent cb7c22beeb
commit b80cf9220c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 18 additions and 16 deletions
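For background, the tests in the diff below rely on a two-sample Kolmogorov-Smirnov check: draw a sample from the tinygrad initializer and a reference sample (now from both numpy and torch), and accept if the largest gap between their empirical CDFs is small. The sketch below is a standalone numpy-only illustration of that statistic, not code from this commit; the sample size and seed are arbitrary. The file's own kstest additionally converts the statistic into a p-value (via ksprob), and the helpers compare that p-value against alpha.

import numpy as np

def ks_statistic(a, b):
  # Two-sample KS statistic: the largest gap between the two empirical CDFs.
  a, b = np.sort(a), np.sort(b)
  grid = np.concatenate([a, b])
  cdf_a = np.searchsorted(a, grid, side="right") / len(a)
  cdf_b = np.searchsorted(b, grid, side="right") / len(b)
  return float(np.max(np.abs(cdf_a - cdf_b)))

rng = np.random.default_rng(1337)
print(ks_statistic(rng.standard_normal(460), rng.standard_normal(460)))  # small: same distribution
print(ks_statistic(rng.standard_normal(460), rng.uniform(-1, 1, 460)))   # large: different distributions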


@@ -1,6 +1,7 @@
 import math
 import unittest
 import numpy as np
+import torch
 from tinygrad.tensor import Tensor
 
 # https://gist.github.com/devries/11405101
@@ -37,39 +38,40 @@ def kstest(l1, l2):
   prob = ksprob((nesq + 0.12 + 0.11 / nesq) * d)
   return prob
 
-def equal_distribution(tinygrad_func, numpy_func, shape=(20, 23), alpha=0.05):
-  Tensor.manual_seed(1337)
-  np.random.seed(1337)
-  x = tinygrad_func(*shape).cpu().numpy().flatten()
-  y = numpy_func(shape).flatten()
-  p = kstest(x, y)
-  return p >= alpha
-
 def normal_test(func, shape=(20, 23), alpha=0.05):
-  y = lambda x: np.random.randn(*x)
-  p = equal_distribution(func, y, shape=shape, alpha=alpha)
-  return p >= alpha
+  x = func(*shape).cpu().numpy().flatten()
+  y = np.random.randn(*shape).flatten()
+  return kstest(x, y) >= alpha
+
+def equal_distribution(tiny_func, torch_func, numpy_func, shape=(20, 23), alpha=0.05):
+  Tensor.manual_seed(1337)
+  torch.manual_seed(1337)
+  np.random.seed(1337)
+  x = tiny_func(*shape).cpu().numpy().flatten()
+  y = numpy_func(shape).flatten()
+  z = torch_func(shape).numpy().flatten()
+  return kstest(x, y) >= alpha and kstest(x, z) >= alpha
 
 class TestRandomness(unittest.TestCase):
   def test_rand(self):
     self.assertFalse(normal_test(Tensor.rand))
-    self.assertTrue(equal_distribution(Tensor.rand, lambda x: np.random.rand(*x)))
+    self.assertTrue(equal_distribution(Tensor.rand, torch.rand, lambda x: np.random.rand(*x)))
 
   def test_randn(self):
     self.assertTrue(normal_test(Tensor.randn))
-    self.assertFalse(equal_distribution(Tensor.randn, lambda x: np.random.rand(*x)))
+    self.assertTrue(equal_distribution(Tensor.randn, torch.randn, lambda x: np.random.randn(*x)))
 
   def test_uniform(self):
     self.assertFalse(normal_test(Tensor.uniform))
-    self.assertTrue(equal_distribution(Tensor.uniform, lambda x: np.random.rand(*x) * 2 - 1))
+    self.assertTrue(equal_distribution(Tensor.uniform, lambda x: torch.nn.init.uniform_(torch.empty(x), a=-1, b=1), lambda x: np.random.rand(*x) * 2 - 1))
 
   def test_scaled_uniform(self):
     self.assertFalse(normal_test(Tensor.scaled_uniform))
-    self.assertTrue(equal_distribution(Tensor.scaled_uniform, lambda x: (np.random.rand(*x) * 2 - 1) / math.sqrt(math.prod(x))))
+    self.assertTrue(equal_distribution(Tensor.scaled_uniform, lambda x: torch.nn.init.uniform_(torch.empty(x), a=-1, b=1) / math.sqrt(math.prod(x)), lambda x: (np.random.rand(*x) * 2 - 1) / math.sqrt(math.prod(x))))
 
   def test_glorot_uniform(self):
     self.assertFalse(normal_test(Tensor.glorot_uniform))
-    self.assertTrue(equal_distribution(Tensor.glorot_uniform, lambda x: (np.random.rand(*x) * 2 - 1) * math.sqrt(6 / (x[0] + math.prod(x[1:])))))
+    self.assertTrue(equal_distribution(Tensor.glorot_uniform, lambda x: torch.nn.init.xavier_uniform_(torch.empty(x)), lambda x: (np.random.rand(*x) * 2 - 1) * math.sqrt(6 / (x[0] + math.prod(x[1:])))))
 
 if __name__ == "__main__":
   unittest.main()
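The commit message notes that the kaiming uniform test was dropped. If such a test were reintroduced later, it would presumably follow the same three-way pattern as the methods above (placed inside TestRandomness). The sketch below is hypothetical: Tensor.kaiming_uniform, its fan-in convention, and the matching numpy bound formula are assumptions, not part of this commit; only torch.nn.init.kaiming_uniform_ is a real torch call.

  # Hypothetical sketch only; not part of this commit.
  # Assumes Tensor.kaiming_uniform follows torch's default fan-in convention with a=sqrt(5),
  # which gives a uniform bound of sqrt(6 / ((1 + a^2) * fan_in)).
  def test_kaiming_uniform(self):
    self.assertFalse(normal_test(Tensor.kaiming_uniform))
    self.assertTrue(equal_distribution(
      Tensor.kaiming_uniform,
      lambda x: torch.nn.init.kaiming_uniform_(torch.empty(x), a=math.sqrt(5)),
      lambda x: (np.random.rand(*x) * 2 - 1) * math.sqrt(6 / ((1 + 5) * math.prod(x[1:])))))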