Diffstat (limited to 'gguf-py')
-rw-r--r--  gguf-py/gguf/constants.py   | 2 ++
-rw-r--r--  gguf-py/gguf/gguf_reader.py | 9 +++++++--
-rw-r--r--  gguf-py/gguf/gguf_writer.py | 3 +++
3 files changed, 12 insertions(+), 2 deletions(-)
diff --git a/gguf-py/gguf/constants.py b/gguf-py/gguf/constants.py
index d2f1de19..6d597bfd 100644
--- a/gguf-py/gguf/constants.py
+++ b/gguf-py/gguf/constants.py
@@ -72,6 +72,7 @@ class Keys:
 
     class Tokenizer:
         MODEL            = "tokenizer.ggml.model"
+        PRE              = "tokenizer.ggml.pre"
         LIST             = "tokenizer.ggml.tokens"
         TOKEN_TYPE       = "tokenizer.ggml.token_type"
         TOKEN_TYPE_COUNT = "tokenizer.ggml.token_type_count"  # for BERT-style token types
@@ -940,6 +941,7 @@ KEY_SSM_TIME_STEP_RANK = Keys.SSM.TIME_STEP_RANK
 
 # tokenization
 KEY_TOKENIZER_MODEL      = Keys.Tokenizer.MODEL
+KEY_TOKENIZER_PRE        = Keys.Tokenizer.PRE
 KEY_TOKENIZER_LIST       = Keys.Tokenizer.LIST
 KEY_TOKENIZER_TOKEN_TYPE = Keys.Tokenizer.TOKEN_TYPE
 KEY_TOKENIZER_SCORES     = Keys.Tokenizer.SCORES
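
The new tokenizer.ggml.pre key names the pre-tokenizer variant a model's BPE tokenizer expects, so loaders can select the matching splitting behavior instead of guessing. A minimal sketch of reading the key back with this package's GGUFReader; "model.gguf" is a hypothetical path, and files written before this change simply lack the field:

    # Sketch (assumed usage): read tokenizer.ggml.pre back from a file.
    # "model.gguf" is a hypothetical path chosen for illustration.
    from gguf.constants import Keys
    from gguf.gguf_reader import GGUFReader

    reader = GGUFReader("model.gguf")
    field = reader.fields.get(Keys.Tokenizer.PRE)
    if field is None:
        print("no tokenizer.ggml.pre field (file pre-dates this change)")
    else:
        # For string fields, field.data indexes the part holding the raw bytes.
        pre = bytes(field.parts[field.data[0]]).decode("utf-8")
        print(f"pre-tokenizer: {pre}")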
diff --git a/gguf-py/gguf/gguf_reader.py b/gguf-py/gguf/gguf_reader.py
index 48ef6d4a..2bdb1552 100644
--- a/gguf-py/gguf/gguf_reader.py
+++ b/gguf-py/gguf/gguf_reader.py
@@ -139,8 +139,13 @@ class GGUFReader:
 
     def _push_field(self, field: ReaderField, skip_sum: bool = False) -> int:
         if field.name in self.fields:
-            raise KeyError(f'Duplicate {field.name} already in list at offset {field.offset}')
-        self.fields[field.name] = field
+            # TODO: add option to generate error on duplicate keys
+            # raise KeyError(f'Duplicate {field.name} already in list at offset {field.offset}')
+
+            print(f'Warning: Duplicate key {field.name} at offset {field.offset}')
+            self.fields[field.name + '_{}'.format(field.offset)] = field
+        else:
+            self.fields[field.name] = field
         return 0 if skip_sum else sum(int(part.nbytes) for part in field.parts)
 
     def _get_str(self, offset: int) -> tuple[npt.NDArray[np.uint64], npt.NDArray[np.uint8]]:
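
With this change a duplicated key no longer aborts reading: the first occurrence keeps its plain name, and each later one is stored under '<name>_<offset>' after a printed warning. A short sketch of what that looks like from the consumer side, using a hypothetical file and key:

    # Sketch (hypothetical file/key): a file that repeats 'general.name' now
    # loads; the duplicate appears under a name suffixed with its byte offset,
    # e.g. 'general.name_1234', instead of raising KeyError.
    from gguf.gguf_reader import GGUFReader

    reader = GGUFReader("dup.gguf")
    for name in reader.fields:
        if name.startswith("general.name"):
            print(name)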
diff --git a/gguf-py/gguf/gguf_writer.py b/gguf-py/gguf/gguf_writer.py
index ec44ac9f..089aece8 100644
--- a/gguf-py/gguf/gguf_writer.py
+++ b/gguf-py/gguf/gguf_writer.py
@@ -427,6 +427,9 @@ class GGUFWriter:
 
     def add_tokenizer_model(self, model: str) -> None:
         self.add_string(Keys.Tokenizer.MODEL, model)
 
+    def add_tokenizer_pre(self, pre: str) -> None:
+        self.add_string(Keys.Tokenizer.PRE, pre)
+
     def add_token_list(self, tokens: Sequence[str] | Sequence[bytes] | Sequence[bytearray]) -> None:
         self.add_array(Keys.Tokenizer.LIST, tokens)
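
Writer-side usage is symmetric with add_tokenizer_model. A minimal sketch of a convert script setting the new field; the output path, architecture, and the "llama-bpe" value are placeholders chosen for illustration, not values mandated by this change:

    # Sketch: emit the new pre-tokenizer key alongside the tokenizer model.
    # "out.gguf", "llama", and "llama-bpe" are illustrative values only.
    from gguf.gguf_writer import GGUFWriter

    writer = GGUFWriter("out.gguf", "llama")
    writer.add_tokenizer_model("gpt2")
    writer.add_tokenizer_pre("llama-bpe")  # new key: tokenizer.ggml.pre
    # ... add token list, scores, tensors as usual ...
    writer.write_header_to_file()
    writer.write_kv_data_to_file()
    writer.close()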