author    DAN™ <dranger003@gmail.com>  2024-05-05 01:19:30 -0400
committer GitHub <noreply@github.com>  2024-05-05 08:19:30 +0300
commit    889bdd76866ea31a7625ec2dcea63ff469f3e981 (patch)
tree      bcfaea83b66f89181980b64dba57a8c8f8f31a49 /convert-hf-to-gguf-update.py
parent    6fbd43221167bf96112f899daf22c127b282cbcf (diff)
command-r : add BPE pre-tokenization (#7063)
* Add BPE pre-tokenization for Command-R/R+.
* Bump transformers convert requirement.
* command-r : add individual digits regex

Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
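For context, the "individual digits" item refers to the BPE pre-tokenizer regex: runs of digits are split so that each digit becomes its own pre-token before byte-pair merges are applied. The snippet below is an illustrative sketch of that behaviour only, using an ASCII pattern; the actual command-r pattern in llama.cpp uses Unicode character classes and additional rules.

# Illustrative sketch only: splits each digit into its own pre-token, roughly
# what "individual digits" means for BPE pre-tokenization. The real command-r
# pattern in llama.cpp uses Unicode classes (e.g. \p{N}) and more rules.
import re

PRETOKENIZE_RE = re.compile(r"[0-9]|[^\s0-9]+|\s+")

def pre_tokenize(text: str) -> list[str]:
    return PRETOKENIZE_RE.findall(text)

print(pre_tokenize("price: 1234 USD"))
# ['price:', ' ', '1', '2', '3', '4', ' ', 'USD']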
Diffstat (limited to 'convert-hf-to-gguf-update.py')
-rwxr-xr-x  convert-hf-to-gguf-update.py | 9
1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/convert-hf-to-gguf-update.py b/convert-hf-to-gguf-update.py
index b41a9290..46a22546 100755
--- a/convert-hf-to-gguf-update.py
+++ b/convert-hf-to-gguf-update.py
@@ -66,6 +66,7 @@ models = [
{"name": "starcoder", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/bigcode/starcoder2-3b", },
{"name": "gpt-2", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/openai-community/gpt2", },
{"name": "refact", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/smallcloudai/Refact-1_6-base", },
+ {"name": "command-r", "tokt": TOKENIZER_TYPE.BPE, "repo": "https://huggingface.co/CohereForAI/c4ai-command-r-v01", },
]
# make directory "models/tokenizers" if it doesn't exist
@@ -106,6 +107,14 @@ for model in models:
     save_path = f"models/tokenizers/{name}/tokenizer.json"
     download_file_with_auth(url, token, save_path)
 
+    # if downloaded file is less than 1KB, we likely need to download an LFS instead
+    if os.path.getsize(save_path) < 1024:
+        # remove the file
+        os.remove(save_path)
+        url = f"{repo}/resolve/main/tokenizer.json"
+        save_path = f"models/tokenizers/{name}/tokenizer.json"
+        download_file_with_auth(url, token, save_path)
+
     if tokt == TOKENIZER_TYPE.SPM:
         url = f"{repo}/resolve/main/tokenizer.model"
         save_path = f"models/tokenizers/{name}/tokenizer.model"