commit:     34ff68eb3224cbc7feb8046b134185d085dfa1be
Author:     Alfredo Tupone <tupone <AT> gentoo <DOT> org>
AuthorDate: Thu Sep 11 18:25:55 2025 +0000
Commit:     Alfredo Tupone <tupone <AT> gentoo <DOT> org>
CommitDate: Thu Sep 11 18:36:29 2025 +0000
URL:        https://gitweb.gentoo.org/repo/gentoo.git/commit/?id=34ff68eb

sci-ml/tokenizers: add 0.22.0

Signed-off-by: Alfredo Tupone <tupone <AT> gentoo.org>

 sci-ml/tokenizers/Manifest                 |   3 +
 sci-ml/tokenizers/tokenizers-0.22.0.ebuild | 142 +++++++++++++++++++++++++++++
 2 files changed, 145 insertions(+)

diff --git a/sci-ml/tokenizers/Manifest b/sci-ml/tokenizers/Manifest
index 48e02199c16d..023e0d7beade 100644
--- a/sci-ml/tokenizers/Manifest
+++ b/sci-ml/tokenizers/Manifest
@@ -1,3 +1,6 @@
 DIST tokenizers-0.21.4-crates.tar.xz 20743760 BLAKE2B ddf6cd0c900c74a0c10232111e600f8a11533ea699c525c2e6a3fa47629c83645893904fb9fd74cb2e2d9ce7211a38807a2346fdd26a251069dcf101dc7c8870 SHA512 99be2a64790df9f17329ccb65243642dd5c9c1ff950be7d6211ef502eb368b05392984bbaca267801b0755aacef552c7b24cfbadb84b1b907fd543702e242d87
 DIST tokenizers-0.21.4.gh.tar.gz 1552701 BLAKE2B 6587f635b5194493f45f9975fe154e1c8bd79c3e0d35db2b868567fe772a5a8ed0a5db4df6be013099e5869d1ca2a19fc5f5e80a0882ecdc859a18506596b683 SHA512 7ed897db3b3edfec740c97311cdab072a90d9a8f637a7fbb23d1595b38e9215c840861531ed0e5284d62a669e1760876ee7f26e0db466a8d404464afd1f37e1a
+DIST tokenizers-0.22.0-crates.tar.xz 20740996 BLAKE2B fcd0fcd907ae8e9c9aa86eb31ab8aaef5767e9279692dc2c2218673a4b7be73795ca41f91c662e10bfd31eb91af1199e01f8fde1ceb69c189a9bb0c279196b47 SHA512 30bd9df1975ff6388708057e6266f981a8dc65664aab6dd81fb9f884f3631fd56c2ea8f1ac2872f2c566f8a656cf2f36ef1ee4fcebb2cbd74c6b0a0e1b14c373
+DIST tokenizers-0.22.0.gh.tar.gz 1563351 BLAKE2B 8cfaa21540553fb7bcb9cf879e0743ab9a37e90bc309f0df1ac9ac0d0de80121e36212e57bc5b0879448de21c712fb96ff91fb07f80140b555f1d8082f704040 SHA512 9f3e4ea335aa6796acc23fbd366fef8555b76716669f06ee9c45babe95918b8ef110b524cdf1471678bd40832be5ed95200024ea6a8f7f9fcfe1c9c99cbab78f
 DIST tokenizers-python-0.21.4-crates.tar.xz 10997708 BLAKE2B 1e8aa5a1bc5c2592d310a72b0bed8e301ba7e294643425fe872eb300b97280ce6e5a8d895afe0175c1443096e314053c23b43388d16f1c01f51d9926b0be1ef8 SHA512 bc8a978ebb01c784b538013a8656863aae6a1e25b40c7fb9fde878e4351abaf5a8934a903fb60ac4ce802f8f35ebe393f0e2888759b71689f2f2df4f1c88a02d
+DIST tokenizers-python-0.22.0-crates.tar.xz 14036656 BLAKE2B f39fa18b1c08d93070d904121eb7cf599ea1c4d92e80b18dcfda664b524aeecacff878e831e76c85ed3f4dd452829d81acc502bb61e2fa6440895862c7e54103 SHA512 1f782936325b427b802b21347ac9469fe7af466b972b571f08f77dc09851744b05557a56b3103d2f87dcbf462a56abd6345d8c641a9e929bf07168edc147b646

diff --git a/sci-ml/tokenizers/tokenizers-0.22.0.ebuild b/sci-ml/tokenizers/tokenizers-0.22.0.ebuild
new file mode 100644
index 000000000000..be3136b60950
--- /dev/null
+++ b/sci-ml/tokenizers/tokenizers-0.22.0.ebuild
@@ -0,0 +1,142 @@
+# Copyright 2023-2025 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# Autogenerated by pycargoebuild 0.15.0
+
+EAPI=8
+
+DISTUTILS_USE_PEP517=maturin
+PYTHON_COMPAT=( python3_{10..13} )
+DISTUTILS_EXT=1
+DISTUTILS_SINGLE_IMPL=1
+RUST_MIN_VER="1.82.0"
+
+CRATES="
+"
+
+inherit cargo distutils-r1
+
+DESCRIPTION="Implementation of today's most used tokenizers"
+HOMEPAGE="https://github.com/huggingface/tokenizers"
+SRC_URI="
+       https://github.com/huggingface/${PN}/archive/refs/tags/v${PV}.tar.gz
+       -> ${P}.gh.tar.gz
+       ${CARGO_CRATE_URIS}
+"
+if [[ ${PKGBUMPING} != ${PVR} ]]; then
+       SRC_URI+="
+               https://dev.gentoo.org/~tupone/distfiles/${P}-crates.tar.xz
+               https://dev.gentoo.org/~tupone/distfiles/${PN}-python-${PV}-crates.tar.xz
+       "
+fi
+
+LICENSE="Apache-2.0"
+# Dependent crate licenses
+LICENSE+="
+       Apache-2.0 Apache-2.0-with-LLVM-exceptions BSD-2 BSD ISC MIT MPL-2.0
+       Unicode-DFS-2016
+"
+SLOT="0"
+KEYWORDS="~amd64"
+
+RDEPEND="dev-libs/oniguruma"
+BDEPEND="
+       test? ( sci-ml/datasets[${PYTHON_SINGLE_USEDEP}] )
+       $(python_gen_cond_dep '
+               dev-python/setuptools-rust[${PYTHON_USEDEP}]
+       ')
+"
+
+distutils_enable_tests pytest
+
+QA_FLAGS_IGNORED=".*/site-packages/tokenizers/.*so"
+
+src_unpack() {
+       cargo_src_unpack
+}
+
+pkg_setup() {
+       python-single-r1_pkg_setup
+       rust_pkg_setup
+}
+
+src_prepare() {
+       default
+       cd bindings/python
+       eapply "${FILESDIR}"/${PN}-0.21.2-test.patch
+       distutils-r1_src_prepare
+}
+
+src_configure() {
+       cd tokenizers
+       cargo_src_configure
+       cd ../bindings/python
+       distutils-r1_src_configure
+}
+
+src_compile() {
+       export RUSTONIG_SYSTEM_LIBONIG=1
+       cd tokenizers
+       cargo_src_compile
+       cd ../bindings/python
+       distutils-r1_src_compile
+}
+
+src_test() {
+       cd tokenizers
+       # Tests do not work
+       #cargo_src_test
+       cd ../bindings/python
+       local -x EPYTEST_IGNORE=( benches/ )
+       local -x EPYTEST_DESELECT=(
+               tests/bindings/test_encoding.py::TestEncoding::test_sequence_ids
+               tests/bindings/test_encoding.py::TestEncoding::test_n_sequences
+               tests/bindings/test_encoding.py::TestEncoding::test_word_to_tokens
+               tests/bindings/test_encoding.py::TestEncoding::test_word_to_chars
+               tests/bindings/test_encoding.py::TestEncoding::test_token_to_sequence
+               tests/bindings/test_encoding.py::TestEncoding::test_token_to_chars
+               tests/bindings/test_encoding.py::TestEncoding::test_token_to_word
+               tests/bindings/test_encoding.py::TestEncoding::test_char_to_token
+               tests/bindings/test_encoding.py::TestEncoding::test_char_to_word
+               tests/bindings/test_encoding.py::TestEncoding::test_truncation
+               tests/bindings/test_encoding.py::TestEncoding::test_invalid_truncate_direction
+               tests/bindings/test_models.py::TestBPE::test_instantiate
+               tests/bindings/test_models.py::TestWordLevel::test_instantiate
+               tests/bindings/test_models.py::TestWordPiece::test_instantiate
+               tests/bindings/test_processors.py::TestByteLevelProcessing::test_processing
+               tests/bindings/test_trainers.py::TestUnigram::test_continuing_prefix_trainer_mismatch
+               tests/bindings/test_trainers.py::TestUnigram::test_train
+               tests/bindings/test_trainers.py::TestUnigram::test_train_parallelism_with_custom_pretokenizer
+               tests/documentation/test_pipeline.py::TestPipeline::test_pipeline
+               tests/documentation/test_pipeline.py::TestPipeline::test_bert_example
+               tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_basic_encode
+               tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_lowercase
+               tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_decoding
+               tests/implementations/test_char_bpe.py::TestCharBPETokenizer::test_multiprocessing_with_parallelism
+               tests/test_serialization.py::TestSerialization::test_full_serialization_albert
+               tests/test_serialization.py::TestSerialization::test_str_big
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_formats
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_add_special_tokens
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_from_pretrained_revision
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_encode_special_tokens
+               tests/bindings/test_tokenizer.py::TestTokenizer::test_splitting
+               tests/documentation/test_quicktour.py::TestQuicktour::test_quicktour
+               tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_datasets
+               tests/documentation/test_tutorial_train_from_iterators.py::TestTrainFromIterators::test_gzip
+               tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_basic_encode
+               tests/implementations/test_bert_wordpiece.py::TestBertWordPieceTokenizer::test_multiprocessing_with_parallelism
+               tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_basic_encode
+               tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_add_prefix_space
+               tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_lowerspace
+               tests/implementations/test_byte_level_bpe.py::TestByteLevelBPE::test_multiprocessing_with_parallelism
+
+       )
+       distutils-r1_src_test
+}
+
+src_install() {
+       cd tokenizers
+       cd ../bindings/python
+       distutils-r1_src_install
+}

Reply via email to