From 25a6e489c1a77116ce0bdacabd8a23b7d2a19c6d Mon Sep 17 00:00:00 2001 From: henk717 Date: Thu, 23 Dec 2021 17:27:09 +0100 Subject: [PATCH] Remove Replace from Huggingface This accidentally ended up in the wrong section; for downloads we do not replace anything — only afterwards. --- aiserver.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/aiserver.py b/aiserver.py index 09a465ae..c3e00602 100644 --- a/aiserver.py +++ b/aiserver.py @@ -890,11 +890,11 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly", "TPUMeshTransforme print("Model does not exist locally, attempting to download from Huggingface...") tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, cache_dir="cache/") with(maybe_use_float16()): - tokenizer = GPT2TokenizerFast.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/") + tokenizer = GPT2TokenizerFast.from_pretrained(vars.model, cache_dir="cache/") try: - model = AutoModelForCausalLM.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/", **maybe_low_cpu_mem_usage()) + model = AutoModelForCausalLM.from_pretrained(vars.model, cache_dir="cache/", **maybe_low_cpu_mem_usage()) except ValueError as e: - model = GPTNeoForCausalLM.from_pretrained(vars.model.replace('/', '_'), cache_dir="cache/", **maybe_low_cpu_mem_usage()) + model = GPTNeoForCausalLM.from_pretrained(vars.model, cache_dir="cache/", **maybe_low_cpu_mem_usage()) model = model.half() import shutil shutil.rmtree("cache/")