diff --git a/aiserver.py b/aiserver.py
index ec8d05a7..ae8fecb3 100644
--- a/aiserver.py
+++ b/aiserver.py
@@ -1594,7 +1594,6 @@ def general_startup(override_args=None):
         arg_parameters['model_path'] = args.path
         arg_parameters['menu_path'] = ""
         model_backends[args.model_backend].set_input_parameters(arg_parameters)
-        logger.info(vars(model_backends[args.model_backend]))
         koboldai_vars.model = args.model
         return args.model_backend
     else:
diff --git a/modeling/inference_models/hf_mtj/class.py b/modeling/inference_models/hf_mtj/class.py
index 5f19897f..e029db9d 100644
--- a/modeling/inference_models/hf_mtj/class.py
+++ b/modeling/inference_models/hf_mtj/class.py
@@ -150,7 +150,7 @@ class model_backend(HFInferenceModel):
 
         tpu_mtj_backend.socketio = utils.socketio
 
-        if utils.koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
+        if self.model_name == "TPUMeshTransformerGPTNeoX":
             utils.koboldai_vars.badwordsids = utils.koboldai_vars.badwordsids_neox
 
         print(
@@ -158,7 +158,7 @@ class model_backend(HFInferenceModel):
                 Colors.PURPLE, Colors.END
             )
         )
-        if utils.koboldai_vars.model in (
+        if self.model_name in (
             "TPUMeshTransformerGPTJ",
             "TPUMeshTransformerGPTNeoX",
         ) and (
@@ -168,7 +168,7 @@ class model_backend(HFInferenceModel):
             raise FileNotFoundError(
                 f"The specified model path {repr(utils.koboldai_vars.custmodpth)} is not the path to a valid folder"
             )
-        if utils.koboldai_vars.model == "TPUMeshTransformerGPTNeoX":
+        if self.model_name == "TPUMeshTransformerGPTNeoX":
             tpu_mtj_backend.pad_token_id = 2
 
         tpu_mtj_backend.koboldai_vars = utils.koboldai_vars
@@ -184,10 +184,10 @@ class model_backend(HFInferenceModel):
         self.init_model_config()
         utils.koboldai_vars.allowsp = True
 
-        logger.info(self.model)
+        logger.info(self.model_name)
         tpu_mtj_backend.load_model(
-            self.model,
-            hf_checkpoint=self.model
+            self.model_name,
+            hf_checkpoint=self.model_name
             not in ("TPUMeshTransformerGPTJ", "TPUMeshTransformerGPTNeoX")
             and utils.koboldai_vars.use_colab_tpu,
             socketio_queue=koboldai_settings.queue,