# Contributor: Patrycja Rosa
# Maintainer: Patrycja Rosa
pkgname=py3-tokenizers
pkgver=0.13.2
pkgrel=1
pkgdesc="Fast State-of-the-Art Tokenizers optimized for Research and Production"
url="https://github.com/huggingface/tokenizers"
arch="all !s390x !riscv64" # blocked by rust
license="Apache-2.0"
depends="python3"
makedepends="
	bzip2-dev
	cargo
	openssl-dev>3
	py3-gpep517
	py3-setuptools
	py3-setuptools-rust
	py3-wheel
	"
source="https://github.com/huggingface/tokenizers/archive/refs/tags/python-v$pkgver/py3-tokenizers-$pkgver.tar.gz"
builddir="$srcdir/tokenizers-python-v$pkgver/bindings/python"
options="net !check" # requires pulling test data

build() {
	gpep517 build-wheel \
		--wheel-dir dist \
		--output-fd 3 3>&1 >&2
}

package() {
	python3 -m installer -d "$pkgdir" \
		dist/tokenizers*.whl
}

sha512sums="
01554dd174c8698a09806d388cbb39c490ca719a3c2d39c0ba7e42570c1e258fbe45d5b237d24b565fdfe85b30d30a40031a1dbcf4b2c17dcf84f0f75e16f270  py3-tokenizers-0.13.2.tar.gz
"
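# A hedged sketch only, not part of the package: checks are disabled above
# because the upstream test suite downloads its fixtures at runtime. If the
# test data were vendored into the source tarball, a check() along the lines
# below (the common aports venv pattern, with py3-pytest added to checkdepends)
# could be used; the tests/ path and required fixtures are assumptions.
#check() {
#	# install the freshly built wheel into a throwaway venv and run pytest
#	python3 -m venv --clear --without-pip --system-site-packages .testenv
#	.testenv/bin/python3 -m installer dist/tokenizers*.whl
#	.testenv/bin/python3 -m pytest tests/
#}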