diff options
author | slaren <slarengh@gmail.com> | 2023-11-30 22:42:23 +0100 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-11-30 23:42:23 +0200 |
commit | f4d973cecb7368c985720ba9100ae6abba14806d (patch) | |
tree | 04ab2280be42dec28a141e73ef1363eb925af5f9 | |
parent | 954e22858c5cea1dc03e9172d3879402af2b5990 (diff) |
convert.py : fix llama/llama2 conversion due to vocab_size=-1 (#4258)
-rwxr-xr-x | convert.py | 2 |
1 file changed, 1 insertion, 1 deletion
@@ -267,7 +267,7 @@ class Params:
             n_ctx = 2048

         return Params(
-            n_vocab = config.get("vocab_size", model["tok_embeddings.weight"].shape[0]),
+            n_vocab = model["tok_embeddings.weight"].shape[0],
             n_embd  = config["dim"],
             n_layer = config["n_layers"],
             n_ctx   = n_ctx,