WebGPU f16 support (f16 bounty part 2) (#8653)
* WebGPU f16 support
* Don't enable f16 yet
* dtype tests passing after bitcast fix
* Maybe all WebGPU green?
* Require shader-f16 in examples
* Minor WGSL touchup
* 1 line shorter
* Simpler
* Add transcendental support
* log2 NaN location mismatch on Vulkan
* NaN skips
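The diff below guards a float16 test with is_dtype_supported. A minimal sketch of the same guard in user code, assuming is_dtype_supported is importable from tinygrad.device (its exact location is an assumption) and that the active backend is WebGPU selected through tinygrad's usual device-selection mechanism; this is an illustrative pattern, not the PR's implementation:

from tinygrad import Tensor, dtypes
from tinygrad.device import is_dtype_supported  # import path is an assumption

# Only generate half-precision random numbers when the active backend (e.g. WebGPU
# with the shader-f16 feature) reports support for float16 and for ulong, mirroring
# the skip condition added in this commit's test change.
if is_dtype_supported(dtypes.float16) and is_dtype_supported(dtypes.ulong):
  x = Tensor.rand((2, 128, 128), dtype=dtypes.float16)
  print(x.dtype, x.numpy().mean())
else:
  print("float16 and/or ulong not supported on this backend")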
@@ -65,7 +65,7 @@ class TestRandomness(unittest.TestCase):
     self.assertFalse(normal_test(Tensor.rand))
     self.assertTrue(equal_distribution(Tensor.rand, torch.rand, lambda x: np.random.rand(*x)))
 
-  @unittest.skipUnless(is_dtype_supported(dtypes.float16), "need float16 support")
+  @unittest.skipUnless(is_dtype_supported(dtypes.float16) and is_dtype_supported(dtypes.ulong), "need float16 and ulong support")
   def test_rand_float16(self):
     N = 128
     x = Tensor.rand((2, N, N), dtype=dtypes.float16)
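The equal_distribution and normal_test helpers seen in the context lines are defined elsewhere in test_randomness.py. As a rough sketch of what a distribution-equality check of this kind typically does (the real helper may differ), here is an assumed version built on scipy's two-sample Kolmogorov-Smirnov test:

import numpy as np
from scipy.stats import ks_2samp

def equal_distribution_sketch(sample_a: np.ndarray, sample_b: np.ndarray, alpha: float = 0.05) -> bool:
  # Two-sample KS test: a large p-value means we cannot reject the hypothesis
  # that both sample sets were drawn from the same underlying distribution.
  return ks_2samp(sample_a.ravel(), sample_b.ravel()).pvalue >= alpha

For example, the float16 samples produced by Tensor.rand could be compared against np.random.rand output of the same shape with this kind of check.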