Prevent tokenizer from taking extra time the first time it's used

This commit is contained in:
Gnome Ann 2022-01-17 22:55:25 -05:00
parent 4fac202059
commit 4da1a2d247
1 changed file with 8 additions and 0 deletions


@@ -4512,6 +4512,14 @@ def randomGameRequest(topic, memory=""):
 loadmodelsettings()
 loadsettings()
+# Prevent tokenizer from taking extra time the first time it's used
+def __preempt_tokenizer():
+    if("tokenizer" not in globals()):
+        return
+    tokenizer.decode([25678, 559])
+    tokenizer.encode("eunoia")
+threading.Thread(target=__preempt_tokenizer).start()
+
 # Precompile TPU backend if required
 if(vars.model in ("TPUMeshTransformerGPTJ",)):
     soft_tokens = tpumtjgetsofttokens()
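
For reference, a minimal standalone sketch of the same warm-up pattern, assuming a Hugging Face transformers tokenizer; the gpt2 checkpoint, the _preempt_tokenizer name, and the daemon flag are illustrative assumptions, not part of this commit:

    import threading

    from transformers import AutoTokenizer

    # Assumed checkpoint; the commit warms whichever tokenizer is already loaded.
    tokenizer = AutoTokenizer.from_pretrained("gpt2")

    def _preempt_tokenizer():
        # The first encode/decode call pays a one-time setup cost (vocab and
        # merge tables are materialized lazily), so trigger it here, off the
        # main thread, before the first real request arrives.
        tokenizer.decode([25678, 559])
        tokenizer.encode("eunoia")

    # daemon=True (an assumption, not in the commit) keeps a still-pending
    # warm-up from blocking interpreter shutdown.
    threading.Thread(target=_preempt_tokenizer, daemon=True).start()

The specific token ids and the string "eunoia" are arbitrary; any decode plus encode round trip exercises the lazy initialization path once so later calls run at full speed.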