1
0
Fork 0

wgpu.utils._device -> wgpu.utils.device (#2330)

* wgpu.utils._device -> wgpu.utils.device

* can i do this?

* no need to specify metal
pull/2318/merge
chenyu 2023-11-16 12:52:13 -05:00 committed by GitHub
parent 27f4c26312
commit 163b2bc26a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 3 additions and 3 deletions

View File

@@ -217,7 +217,7 @@ jobs:
path: ${{ env.Python3_ROOT_DIR }}/lib/python3.11/site-packages
key: metal-webgpu-testing-packages-${{ hashFiles('**/setup.py') }}
- name: Install Dependencies
-        run: pip install -e '.[metal,webgpu,testing]' --extra-index-url https://download.pytorch.org/whl/cpu
+        run: pip install -e '.[webgpu,testing]' --extra-index-url https://download.pytorch.org/whl/cpu
- name: Cache model weights
uses: actions/cache@v3
with:

View File

@@ -29,7 +29,7 @@ setup(name='tinygrad',
'cuda': ["pycuda"],
'arm': ["unicorn"],
'triton': ["triton-nightly>=2.1.0.dev20231014192330", "pycuda"],
-    'webgpu': ["wgpu"],
+    'webgpu': ["wgpu>=v0.12.0"],
'linting': [
"flake8",
"pylint",

View File

@@ -1,6 +1,6 @@
import numpy as np
import functools
-from wgpu.utils._device import get_default_device
+from wgpu.utils.device import get_default_device
from tinygrad.runtime.lib import RawBufferCopyIn, LRUAllocator
from tinygrad.helpers import dtypes, DType
from tinygrad.ops import Compiled