convert : use correct context length for nomic-embed-text-v2 (#13216)

Author: Jared Van Bortel (committed by GitHub)
Date:   2025-05-02 11:41:54 -04:00
commit 7d2123484e
parent 074e42ab31


@@ -506,7 +506,7 @@ class TextModel(ModelBase):
     def set_gguf_parameters(self):
         self.gguf_writer.add_block_count(self.block_count)
-        if (n_ctx := self.find_hparam(["max_position_embeddings", "n_ctx"], optional=True)) is not None:
+        if (n_ctx := self.find_hparam(["max_position_embeddings", "n_ctx", "n_positions"], optional=True)) is not None:
             self.gguf_writer.add_context_length(n_ctx)
             logger.info(f"gguf: context length = {n_ctx}")
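
The key list passed to find_hparam is tried in order, so adding "n_positions" only takes effect for configs (such as NomicBert's) that expose neither "max_position_embeddings" nor "n_ctx". A minimal sketch of that fallback behavior, assuming find_hparam simply returns the value of the first key present (the real helper in convert_hf_to_gguf.py may differ in details):

from typing import Any

def find_hparam(hparams: dict[str, Any], keys: list[str], optional: bool = False) -> Any:
    # Return the value of the first key present in the config.
    # With optional=True, a complete miss yields None instead of an error.
    for key in keys:
        if key in hparams:
            return hparams[key]
    if optional:
        return None
    raise KeyError(f"could not find any of: {keys}")

# NomicBert configs expose "n_positions" rather than "max_position_embeddings",
# so extending the key list lets set_gguf_parameters() pick up a context length.
assert find_hparam({"n_positions": 2048},
                   ["max_position_embeddings", "n_ctx", "n_positions"], optional=True) == 2048
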
@@ -3627,8 +3627,13 @@ class NomicBertModel(BertModel):
         if self._tokenizer_is_xlmroberta:
             self._xlmroberta_tokenizer_init()
-        # the HF config claims n_ctx=8192, but it uses RoPE scaling
-        self.hparams["n_ctx"] = 2048
+        npos, mtp = self.hparams["n_positions"], self.hparams.get("max_trained_positions", 2048)
+        if npos == 8192 and mtp == 2048:
+            self.hparams["n_positions"] = 2048  # nomic-embed-text v1 and v1.5 are trained for 2048 tokens.
+        elif npos == 2048 and mtp == 2048:
+            self.hparams["n_positions"] = 512  # nomic-embed-text-v2-moe is trained for 512 tokens.
+        else:
+            raise ValueError(f"unrecognized parameters: n_positions={npos}, max_trained_positions={mtp}")
         assert self.hparams["activation_function"] == "gelu" if self.is_moe else "swiglu"
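
Read on its own, the new hunk maps the values found in the HF configs to the context length that actually lands in the GGUF. A self-contained sketch of that mapping (the helper name and the inline example configs are illustrative, not part of the commit):

def trained_context_length(hparams: dict) -> int:
    # Mirror of the commit's selection logic: the HF configs advertise a
    # RoPE-scaled n_positions, but the models were trained on fewer tokens.
    npos = hparams["n_positions"]
    mtp = hparams.get("max_trained_positions", 2048)
    if npos == 8192 and mtp == 2048:
        return 2048  # nomic-embed-text v1 and v1.5
    if npos == 2048 and mtp == 2048:
        return 512   # nomic-embed-text-v2-moe
    raise ValueError(f"unrecognized parameters: n_positions={npos}, max_trained_positions={mtp}")

print(trained_context_length({"n_positions": 8192, "max_trained_positions": 2048}))  # 2048
print(trained_context_length({"n_positions": 2048}))                                 # 512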