Mirror of https://github.com/KoboldAI/KoboldAI-Client.git (synced 2025-06-05 21:59:24 +02:00)

	Fix vars.modeldim not always being set
@@ -637,12 +637,15 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]):
                     generator = model.generate
                 elif(vars.breakmodel):  # Use both RAM and VRAM (breakmodel)
                     model = AutoModelForCausalLM.from_pretrained(vars.model)
+                    vars.modeldim = int(model.transformer.hidden_size)
                     device_config(model)
                 else:
                     model = AutoModelForCausalLM.from_pretrained(vars.model)
+                    vars.modeldim = int(model.transformer.hidden_size)
                     generator = model.generate
             else:
                 model = AutoModelForCausalLM.from_pretrained(vars.model)
+                vars.modeldim = int(model.transformer.hidden_size)
                 generator = model.generate
 
         # Suppress Author's Note by flagging square brackets (Old implementation)
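
The change adds the vars.modeldim assignment to every model-loading branch (breakmodel, GPU, and plain CPU), which is what the commit title refers to: previously some branches skipped it, leaving vars.modeldim unset. For context, below is a minimal, illustrative Python sketch (not part of the patch) of how a model's embedding dimension can be read after loading it with Hugging Face transformers. The lookup on model.transformer.hidden_size matches what the patch uses; the get_model_dim helper name, the config-based fallback, and the "gpt2" checkpoint are assumptions for illustration only.

    # Illustrative sketch, not from the patch: derive the model dimension the
    # same way vars.modeldim is derived above, with a hedged config fallback.
    from transformers import AutoModelForCausalLM

    def get_model_dim(model) -> int:  # hypothetical helper name
        # The patch reads the size from the transformer submodule when exposed there.
        transformer = getattr(model, "transformer", None)
        if transformer is not None and hasattr(transformer, "hidden_size"):
            return int(transformer.hidden_size)
        # Fallback assumption: most transformers configs expose hidden_size
        # (GPT-2 style configs map it to n_embd via their attribute map).
        return int(model.config.hidden_size)

    model = AutoModelForCausalLM.from_pretrained("gpt2")  # example checkpoint, not vars.model
    print(get_model_dim(model))  # 768 for gpt2
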
Author: Gnome Ann