diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index baafa9a8..d5bced9f 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -29,3 +29,22 @@ jobs: run: | uv run black --check src uv run black --check tests + + validate-dependencies: + name: Validate python_depends.json + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Validate python_depends.json is up-to-date + run: | + python update_python_depends.py --validate || { + echo "Error: python_depends.json is out of date." + echo "Please run: python update_python_depends.py" + exit 1 + } diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a9e8518d..c4e182bc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -54,9 +54,9 @@ jobs: run: | uv run pytest tests - - name: Run dependency test with dependency installed + - name: Re-run dependency test with dependencies installed run: | - uv pip install nvidia-cutlass-dsl + uv pip install einops nvidia-cutlass-dsl uv run pytest tests/test_deps.py - name: Run staging tests @@ -66,19 +66,6 @@ jobs: HUGGINGFACE_CO_STAGING=true uv run pytest --token -m "is_staging_test" tests/ if: matrix.python_version == '3.10' && matrix.torch-version == '2.7.0' - - name: Check kernel conversion - run: | - uv pip install wheel - uv run kernels to-wheel kernels-community/triton-layer-norm 0.0.1 - uv pip install triton_layer_norm-0.0.1*.whl - uv run python -c "import triton_layer_norm" - - - name: Check kernel conversion (flat build) - run: | - uv run kernels to-wheel kernels-test/flattened-build 0.0.1 - uv pip install flattened_build-0.0.1*.whl - uv run python -c "import flattened_build" - - name: Check README generation # For now, just checks that generation doesn't fail. 
run: | diff --git a/docs/source/cli.md b/docs/source/cli.md index 65a014b9..c7588c51 100644 --- a/docs/source/cli.md +++ b/docs/source/cli.md @@ -20,31 +20,6 @@ Checking variant: torch28-cxx11-cu128-aarch64-linux [...] ``` -### kernels to-wheel - -We strongly recommend downloading kernels from the Hub using the `kernels` -package, since this comes with large [benefits](index.md) over using Python -wheels. That said, some projects may require deployment of kernels as -wheels. The `kernels` utility provides a simple solution to this. You can -convert any Hub kernel into a set of wheels with the `to-wheel` command: - -```bash -$ kernels to-wheel drbh/img2grey 1.1.2 -☸ img2grey-1.1.2+torch27cu128cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch26cu124cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch26cu126cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch27cu126cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch26cu126cxx98-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch27cu128cxx11-cp39-abi3-manylinux_2_28_aarch64.whl -☸ img2grey-1.1.2+torch26cu126cxx98-cp39-abi3-manylinux_2_28_aarch64.whl -☸ img2grey-1.1.2+torch27cu126cxx11-cp39-abi3-manylinux_2_28_aarch64.whl -☸ img2grey-1.1.2+torch26cu126cxx11-cp39-abi3-manylinux_2_28_aarch64.whl -☸ img2grey-1.1.2+torch26cu118cxx98-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch26cu124cxx98-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch26cu118cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -☸ img2grey-1.1.2+torch27cu118cxx11-cp39-abi3-manylinux_2_28_x86_64.whl -``` - ### kernels upload Use `kernels upload --repo_id="hub-username/kernel"` to upload diff --git a/pyproject.toml b/pyproject.toml index 4c99dc20..76dace61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "kernels" -version = "0.11.1.dev0" +version = "0.11.4" description = "Download compute kernels" authors = [ { name = "OlivierDehaene", email = 
"olivier@huggingface.co" }, @@ -46,6 +46,9 @@ kernels = "kernels.cli:main" [project.entry-points."egg_info.writers"] "kernels.lock" = "kernels.lockfile:write_egg_lockfile" +[tool.setuptools.package-data] +kernels = ["python_depends.json"] + [tool.isort] profile = "black" line_length = 119 diff --git a/src/kernels/cli.py b/src/kernels/cli.py index bc8dece0..0d95533e 100644 --- a/src/kernels/cli.py +++ b/src/kernels/cli.py @@ -12,7 +12,6 @@ from kernels.utils import install_kernel, install_kernel_all_variants from .doc import generate_readme_for_kernel -from .wheel import build_variant_to_wheel BUILD_VARIANT_REGEX = re.compile(r"^(torch\d+\d+|torch-)") @@ -92,25 +91,6 @@ def main(): ) lock_parser.set_defaults(func=lock_kernels) - to_wheel_parser = subparsers.add_parser( - "to-wheel", help="Convert a kernel to a wheel file" - ) - to_wheel_parser.add_argument("repo_id", type=str, help="The kernel repo ID") - to_wheel_parser.add_argument("version", type=str, help="The kernel version") - to_wheel_parser.add_argument( - "--python-version", - type=str, - default="3.9", - help="The minimum Python version. Must match the Python version that the kernel was compiled for.", - ) - to_wheel_parser.add_argument( - "--manylinux-version", - type=str, - default="2.28", - help="The manylinux version. 
Must match the manylinux version that the kernel was compiled for.", - ) - to_wheel_parser.set_defaults(func=kernels_to_wheel) - # Add generate-readme subcommand parser generate_readme_parser = subparsers.add_parser( "generate-readme", @@ -174,24 +154,6 @@ def download_kernels(args): sys.exit(1) -def kernels_to_wheel(args): - variants_path = install_kernel_all_variants( - repo_id=args.repo_id, revision=f"v{args.version}" - ) - for variant_path in variants_path.iterdir(): - if not variant_path.is_dir(): - continue - wheel_path = build_variant_to_wheel( - manylinux_version=args.manylinux_version, - python_version=args.python_version, - repo_id=args.repo_id, - version=args.version, - variant_path=variant_path, - wheel_dir=Path("."), - ) - print(f"☸️ {wheel_path.name}", file=sys.stderr) - - def lock_kernels(args): with open(args.project_dir / "pyproject.toml", "rb") as f: data = tomllib.load(f) diff --git a/src/kernels/deps.py b/src/kernels/deps.py index 32b6a570..21fd4d07 100644 --- a/src/kernels/deps.py +++ b/src/kernels/deps.py @@ -1,27 +1,44 @@ import importlib.util -from typing import List, Set +import json +from pathlib import Path +from typing import Dict, List -allowed_dependencies: Set[str] = { - "einops", - "nvidia-cutlass-dsl", -} +try: + with open(Path(__file__).parent / "python_depends.json", "r") as f: + DEPENDENCY_DATA: Dict = json.load(f) +except FileNotFoundError: + raise FileNotFoundError( + "Cannot load dependency data, is `kernels` correctly installed?" + ) -def validate_dependencies(dependencies: List[str]): +def validate_dependencies(dependencies: List[str], backend: str): """ Validate a list of dependencies to ensure they are installed. Args: - dependencies (`List[str]`): A list of dependency strings. + dependencies (`List[str]`): A list of dependency strings to validate. + backend (`str`): The backend to validate dependencies for. 
""" + general_deps = DEPENDENCY_DATA.get("general", {}) + backend_deps = DEPENDENCY_DATA.get("backends", {}).get(backend, {}) + + # Validate each dependency for dependency in dependencies: - if dependency not in allowed_dependencies: - allowed = ", ".join(sorted(allowed_dependencies)) - raise ValueError( - f"Invalid dependency: {dependency}, allowed dependencies: {allowed}" - ) + # Look up dependency in general dependencies first, then backend-specific + if dependency in general_deps: + python_packages = general_deps[dependency].get("python", []) + elif dependency in backend_deps: + python_packages = backend_deps[dependency].get("python", []) + else: + # Dependency not found in general or backend-specific dependencies + raise ValueError(f"Invalid dependency: {dependency}") - if importlib.util.find_spec(dependency.replace("-", "_")) is None: - raise ImportError( - f"Kernel requires dependency `{dependency}`. Please install with: pip install {dependency}" - ) + # Check if each python package is installed + for python_package in python_packages: + # Convert package name to module name (replace - with _) + module_name = python_package.replace("-", "_") + if importlib.util.find_spec(module_name) is None: + raise ImportError( + f"Kernel requires Python dependency `{python_package}`. 
Please install with: pip install {python_package}" + ) diff --git a/src/kernels/python_depends.json b/src/kernels/python_depends.json new file mode 100644 index 00000000..a8a5685b --- /dev/null +++ b/src/kernels/python_depends.json @@ -0,0 +1,35 @@ +{ + "general": { + "einops": { + "nix": [ + "einops" + ], + "python": [ + "einops" + ] + } + }, + "backends": { + "cpu": {}, + "cuda": { + "nvidia-cutlass-dsl": { + "nix": [ + "nvidia-cutlass-dsl" + ], + "python": [ + "nvidia-cutlass-dsl" + ] + } + }, + "metal": {}, + "rocm": {}, + "xpu": { + "onednn": { + "nix": [], + "python": [ + "onednn-devel" + ] + } + } + } +} \ No newline at end of file diff --git a/src/kernels/utils.py b/src/kernels/utils.py index 386ce70a..2cb3fdbc 100644 --- a/src/kernels/utils.py +++ b/src/kernels/utils.py @@ -18,8 +18,8 @@ from kernels._system import glibc_version from kernels._versions import select_revision_or_version -from kernels.lockfile import KernelLock, VariantLock from kernels.deps import validate_dependencies +from kernels.lockfile import KernelLock, VariantLock ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"} @@ -47,6 +47,23 @@ def _get_privateuse_backend_name() -> Optional[str]: return None +def backend() -> str: + import torch + + if torch.version.cuda is not None: + return "cuda" + elif torch.version.hip is not None: + return "rocm" + elif torch.backends.mps.is_available(): + return "metal" + elif hasattr(torch.version, "xpu") and torch.version.xpu is not None: + return "xpu" + elif _get_privateuse_backend_name() == "npu": + return "cann" + else: + return "cpu" + + def build_variant() -> str: import torch @@ -117,7 +134,7 @@ def _import_from_path(module_name: str, variant_path: Path) -> ModuleType: with open(metadata_path, "r") as f: metadata = json.load(f) deps = metadata.get("python-depends", []) - validate_dependencies(deps) + validate_dependencies(deps, backend()) file_path = variant_path / "__init__.py" if not file_path.exists(): diff --git a/src/kernels/wheel.py
b/src/kernels/wheel.py deleted file mode 100644 index bc33c617..00000000 --- a/src/kernels/wheel.py +++ /dev/null @@ -1,194 +0,0 @@ -import email.policy -import os -from dataclasses import dataclass -from email.message import Message -from importlib.metadata import PackageNotFoundError, version -from pathlib import Path -from typing import Optional - -try: - KERNELS_VERSION = version("kernels") -except PackageNotFoundError: - KERNELS_VERSION = "unknown" - - -@dataclass -class Metadata: - name: str - version: str - cuda_version: Optional[str] - cxx_abi_version: Optional[str] - torch_version: Optional[str] - os: Optional[str] - platform: Optional[str] - - @property - def is_universal(self) -> bool: - return self.platform is None - - -def build_variant_to_wheel( - repo_id: str, - *, - version: str, - variant_path: Path, - wheel_dir: Path, - manylinux_version: str = "2.28", - python_version: str = "3.9", -) -> Path: - """ - Create a wheel file from the variant path. - """ - name = repo_id.split("/")[-1].replace("_", "-") - metadata = extract_metadata(name, version, variant_path) - return build_wheel( - metadata, - variant_path=variant_path, - wheel_dir=wheel_dir, - manylinux_version=manylinux_version, - python_version=python_version, - ) - - -def extract_metadata(name: str, version: str, variant_path: Path) -> Metadata: - """ - Extract metadata from the variant path. 
- """ - if variant_path.name == "torch-universal": - return Metadata( - name=name, - version=version, - cuda_version=None, - cxx_abi_version=None, - torch_version=None, - os=None, - platform=None, - ) - - if not variant_path.name.startswith("torch"): - raise ValueError("Currently only conversion of Torch kernels is supported.") - - variant_parts = variant_path.name.removeprefix("torch").split("-") - if len(variant_parts) != 5: - raise ValueError(f"Invalid variant name: {variant_path.name}") - - torch_version = f"{variant_parts[0][:-1]}.{variant_parts[0][-1:]}" - cpp_abi_version = variant_parts[1].removeprefix("cxx") - cuda_version = variant_parts[2].removeprefix("cu") - platform = variant_parts[3].replace("-", "_") - os = variant_parts[4] - - return Metadata( - name=name, - version=version, - cuda_version=cuda_version, - cxx_abi_version=cpp_abi_version, - torch_version=torch_version, - os=os, - platform=platform, - ) - - -def build_wheel( - metadata: Metadata, - *, - variant_path: Path, - wheel_dir: Path, - manylinux_version: str = "2.28", - python_version: str = "3.9", -) -> Path: - """ - Build the wheel file. - """ - try: - from wheel.wheelfile import WheelFile # type: ignore - except ImportError: - raise ImportError( - "The 'wheel' package is required to build wheels. 
Please install it with: `pip install wheel`" - ) - - name = metadata.name.replace("-", "_") - python_version_flat = python_version.replace(".", "") - - if metadata.is_universal: - python_tag = f"py{python_version_flat}" - abi_tag = "none" - platform_tag = "any" - wheel_filename = ( - f"{name}-{metadata.version}-{python_tag}-{abi_tag}-{platform_tag}.whl" - ) - dist_info_dir_name = f"{name}-{metadata.version}.dist-info" - root_is_purelib = "true" - requires_dist_torch = "torch" - else: - python_tag = f"cp{python_version_flat}" - abi_tag = "abi3" - - if ( - metadata.torch_version is None - or metadata.cuda_version is None - or metadata.cxx_abi_version is None - or metadata.os is None - or metadata.platform is None - ): - raise ValueError( - "Torch version, CUDA version, C++ ABI version, OS, and platform must be specified for non-universal wheels." - ) - - local_version = f"torch{metadata.torch_version.replace('.', '')}cu{metadata.cuda_version}cxx{metadata.cxx_abi_version}" - - if metadata.os == "linux": - platform_tag = ( - f"manylinux_{manylinux_version.replace('.', '_')}_{metadata.platform}" - ) - else: - platform_tag = f"{metadata.os}_{metadata.platform.replace('-', '_')}" - - wheel_filename = f"{name}-{metadata.version}+{local_version}-{python_tag}-{abi_tag}-{platform_tag}.whl" - dist_info_dir_name = f"{name}-{metadata.version}+{local_version}.dist-info" - root_is_purelib = "false" - requires_dist_torch = f"torch=={metadata.torch_version}.*" - - wheel_path = wheel_dir / wheel_filename - - wheel_msg = Message(email.policy.compat32) - wheel_msg.add_header("Wheel-Version", "1.0") - wheel_msg.add_header("Generator", f"kernels ({KERNELS_VERSION})") - wheel_msg.add_header("Root-Is-Purelib", root_is_purelib) - wheel_msg.add_header("Tag", f"{python_tag}-{abi_tag}-{platform_tag}") - - metadata_msg = Message(email.policy.compat32) - metadata_msg.add_header("Metadata-Version", "2.1") - metadata_msg.add_header("Name", name) - metadata_msg.add_header("Version", 
metadata.version) - metadata_msg.add_header("Summary", f"{name} kernel") - metadata_msg.add_header("Requires-Python", ">=3.9") - metadata_msg.add_header("Requires-Dist", requires_dist_torch) - - # Check if the kernel uses a flat build. - if (variant_path / "__init__.py").exists(): - flat_build = True - source_pkg_dir = variant_path - else: - flat_build = False - source_pkg_dir = variant_path / name - - with WheelFile(wheel_path, "w") as wheel_file: - for root, dirnames, filenames in os.walk(source_pkg_dir): - for filename in filenames: - if filename.endswith(".pyc"): - continue - - abs_filepath = os.path.join(root, filename) - entry_name = os.path.relpath(abs_filepath, variant_path) - if flat_build: - entry_name = os.path.join(name, entry_name) - wheel_file.write(abs_filepath, entry_name) - - wheel_metadata_path = os.path.join(dist_info_dir_name, "WHEEL") - wheel_file.writestr(wheel_metadata_path, str(wheel_msg).encode("utf-8")) - - metadata_path = os.path.join(dist_info_dir_name, "METADATA") - wheel_file.writestr(metadata_path, str(metadata_msg).encode("utf-8")) - - return wheel_path diff --git a/tests/test_deps.py b/tests/test_deps.py index 63c9c911..e1ea9983 100644 --- a/tests/test_deps.py +++ b/tests/test_deps.py @@ -5,11 +5,13 @@ from kernels import get_kernel -def test_python_deps(): - must_raise = find_spec("nvidia_cutlass_dsl") is None +@pytest.mark.parametrize("dependency", ["einops", "nvidia-cutlass-dsl"]) +def test_python_deps(dependency): + must_raise = find_spec(dependency.replace("-", "_")) is None if must_raise: with pytest.raises( - ImportError, match=r"Kernel requires dependency `nvidia-cutlass-dsl`" + ImportError, + match=r"Kernel requires Python dependency `(einops|nvidia-cutlass-dsl)`", ): get_kernel("kernels-test/python-dep") else: diff --git a/update_python_depends.py b/update_python_depends.py new file mode 100644 index 00000000..2b7a98d0 --- /dev/null +++ b/update_python_depends.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +""" +Download 
python_depends.json from the kernel-builder repository. +""" + +import argparse +import json +from pathlib import Path +from typing import Dict +from urllib.request import Request, urlopen + +URL = "https://raw.githubusercontent.com/huggingface/kernel-builder/refs/heads/main/build2cmake/src/python_dependencies.json" +TARGET_DIR = Path(__file__).parent / "src" / "kernels" +TARGET_FILE = TARGET_DIR / "python_depends.json" + + +def download_json(url: str) -> Dict: + """Download JSON from URL and return parsed dict.""" + request = Request(url) + + with urlopen(request, timeout=30) as response: + content = response.read() + + return json.loads(content) + + +def download_file(url: str, target_path: Path) -> None: + """Download file from URL and save to target path.""" + data = download_json(url) + + with open(target_path, "w") as f: + json.dump(data, f, indent=2) + + +def validate_file(url: str, target_path: Path): + """Check if local file is up-to-date with remote version. + + Raises ValueError if the local file is missing or out of date. + """ + if not target_path.exists(): + raise ValueError(f"Local Python dependencies at {target_path} are missing.") + + remote_json = download_json(url) + + with open(target_path, "r") as f: + local_json = json.load(f) + + if local_json != remote_json: + raise ValueError(f"Local Python dependencies at {target_path} are out of date.") + + +def main(): + parser = argparse.ArgumentParser( + description="Download or validate python_depends.json" + ) + parser.add_argument( + "--validate", + action="store_true", + help="Validate that local file is up-to-date instead of downloading", + ) + args = parser.parse_args() + + if args.validate: + validate_file(URL, TARGET_FILE) + else: + download_file(URL, TARGET_FILE) + + +if __name__ == "__main__": + main()