Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-06-05 21:59:24 +02:00
Don't crash if XGLMSinusoidalPositionalEmbedding doesn't exist
@@ -768,7 +768,11 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransforme
     # Temporary fix for XGLM positional embedding issues until
     # https://github.com/huggingface/transformers/issues/15736
     # is resolved
-    from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
-    @torch.no_grad()
-    def new_forward(self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0):
-        bsz, seq_len = inputs_embeds.size()[:-1]
+    try:
+        from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
+    except ImportError:
+        pass
+    else:
+        @torch.no_grad()
+        def new_forward(self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0):
+            bsz, seq_len = inputs_embeds.size()[:-1]
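For context, the change is the standard guarded-import pattern: attempt the import, skip the workaround on ImportError, and apply it only in the else branch. Below is a minimal sketch assuming torch and transformers are installed; the body of new_forward is truncated to the line visible in this hunk, and the commented-out forward assignment only illustrates how such a monkey-patch would be installed, it is not a line from this commit.

import torch

try:
    # Older transformers releases do not ship the XGLM model, so this import can fail.
    from transformers.models.xglm.modeling_xglm import XGLMSinusoidalPositionalEmbedding
except ImportError:
    # Nothing to patch: skip the workaround instead of crashing at startup.
    pass
else:
    @torch.no_grad()
    def new_forward(self, input_ids: torch.Tensor = None, inputs_embeds: torch.Tensor = None, past_key_values_length: int = 0):
        # Derive batch size and sequence length from the embeddings, as in the diff above.
        bsz, seq_len = inputs_embeds.size()[:-1]
        ...  # remainder of the patched positional-embedding computation (not shown in this hunk)

    # Illustrative only: installing the replacement would look like this.
    # XGLMSinusoidalPositionalEmbedding.forward = new_forward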