# test-tokenizer-0.sh
  1. #!/bin/bash
  2. #
  3. # Usage:
  4. #
  5. # test-tokenizer-0.sh <name> <input>
  6. #
  7. if [ $# -ne 2 ]; then
  8. printf "Usage: $0 <name> <input>\n"
  9. exit 1
  10. fi
  11. name=$1
  12. input=$2
  13. make -j tests/test-tokenizer-0
  14. printf "Testing %s on %s ...\n" $name $input
  15. python3 ./tests/test-tokenizer-0.py ./models/tokenizers/$name --fname-tok $input > /tmp/test-tokenizer-0-$name-py.log 2>&1
  16. cat /tmp/test-tokenizer-0-$name-py.log | grep "tokenized in"
  17. ./tests/test-tokenizer-0 ./models/ggml-vocab-$name.gguf $input > /tmp/test-tokenizer-0-$name-cpp.log 2>&1
  18. cat /tmp/test-tokenizer-0-$name-cpp.log | grep "tokenized in"
  19. diff $input.tok $input.tokcpp > /dev/null 2>&1
  20. if [ $? -eq 0 ]; then
  21. printf "Tokenization is correct!\n"
  22. else
  23. diff $input.tok $input.tokcpp | head -n 32
  24. printf "Tokenization differs!\n"
  25. fi