Merge pull request #64 from VE-FORBRYDERNE/patch

Prevent tokenizer from taking extra time the first time it's used
This commit is contained in:
henk717 2022-01-18 05:42:17 +01:00 committed by GitHub
commit 509b9a8936
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 8 additions and 0 deletions

View File

@ -4512,6 +4512,14 @@ def randomGameRequest(topic, memory=""):
loadmodelsettings()
loadsettings()
# Warm the tokenizer up in a background thread so its lazy first-use
# initialization cost is not paid during the first real request.
def __preempt_tokenizer():
    # Skip silently when no tokenizer has been loaded into module globals yet.
    if "tokenizer" in globals():
        # One decode and one encode call trigger any one-time setup work.
        tokenizer.decode([25678, 559])
        tokenizer.encode("eunoia")
threading.Thread(target=__preempt_tokenizer).start()
# Precompile TPU backend if required
if(vars.model in ("TPUMeshTransformerGPTJ",)):
soft_tokens = tpumtjgetsofttokens()