2023-02-10 11:08:20 -07:00
|
|
|
#!/usr/bin/env python
|
2023-02-26 18:08:24 -07:00
|
|
|
import io
|
2023-02-10 11:08:20 -07:00
|
|
|
import unittest
|
2023-03-20 00:43:49 -06:00
|
|
|
from tinygrad.helpers import getenv
|
2023-03-15 09:59:52 -06:00
|
|
|
from extra.utils import fetch, fake_torch_load_zipped
|
2023-02-26 18:08:24 -07:00
|
|
|
from PIL import Image
|
2023-02-10 11:08:20 -07:00
|
|
|
|
2023-03-20 00:43:49 -06:00
|
|
|
@unittest.skipIf(getenv("CI", "") != "", "no internet tests in CI")
class TestFetch(unittest.TestCase):
  """Network-dependent tests for extra.utils.fetch (skipped when CI is set)."""

  def test_fetch_bad_http(self):
    # fetch is expected to raise AssertionError for non-2xx HTTP status codes
    for url in ('http://httpstat.us/500', 'http://httpstat.us/404', 'http://httpstat.us/400'):
      self.assertRaises(AssertionError, fetch, url)

  def test_fetch_small(self):
    # a successful fetch returns a non-empty payload
    self.assertGreater(len(fetch('https://google.com')), 0)

  def test_fetch_img(self):
    # fetched bytes must decode to an image with the expected dimensions
    raw = fetch("https://media.istockphoto.com/photos/hen-picture-id831791190")
    decoded = Image.open(io.BytesIO(raw))
    assert decoded.size == (705, 1024)
2023-03-20 00:43:49 -06:00
|
|
|
class TestUtils(unittest.TestCase):
  """Tests for extra.utils helpers that need no network access."""

  def test_fake_torch_load_zipped(self):
    # heavyweight deps are only needed here, so import them locally
    import torch
    import numpy as np
    import tempfile

    class LayerWithOffset(torch.nn.Module):
      # Module whose parameters are strided views into one shared storage,
      # with nonzero (and overlapping) storage offsets.
      def __init__(self):
        super().__init__()
        backing = torch.randn(16)
        self.param1 = torch.nn.Parameter(backing.as_strided([2, 2], [2, 3], storage_offset=5))
        self.param2 = torch.nn.Parameter(backing.as_strided([2, 2], [2, 3], storage_offset=4))

    # exercise both float16 and float32 checkpoints
    for use_half in (True, False):
      model = torch.nn.Sequential(
        torch.nn.Linear(4, 8),
        torch.nn.Linear(8, 3),
        LayerWithOffset()
      )
      if use_half: model = model.half()

      with tempfile.TemporaryDirectory() as tmpdir:
        ckpt = tmpdir + '/testloadmodel.pth'
        torch.save(model.state_dict(), ckpt)
        loaded = fake_torch_load_zipped(ckpt)

        # every tensor must round-trip with identical shape, dtype and values
        for key, expected in model.state_dict().items():
          got = loaded[key].numpy()
          expected = expected.numpy()
          assert expected.shape == got.shape
          assert expected.dtype == got.dtype
          assert np.array_equal(expected, got)
2023-02-10 11:08:20 -07:00
|
|
|
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  unittest.main()