GPU detection bugfix
parent 99c5ff240c
commit dd77ac2f3a
@@ -198,9 +198,9 @@ if(not vars.model in ["InferKit", "Colab", "OAI", "ReadOnly"]):
     elif(vars.hascuda):
         print("{0}Use GPU or CPU for generation?: (Default GPU){1}\n".format(colors.CYAN, colors.END))
         print(" 1 - GPU\n 2 - CPU\n")
+        genselected = False
 
     if(vars.hascuda):
-        genselected = False
         while(genselected == False):
             genselect = input("Mode> ")
             if(genselect == ""):
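As reconstructed above, the hunk moves the genselected = False initialisation out of the unconditional if(vars.hascuda): block and into the elif(vars.hascuda): branch that actually shows the GPU/CPU prompt, so the flag is only reset when the prompt loop is about to run. Below is a minimal, self-contained sketch of that prompt loop; only the prompt text, the loop condition, the "Mode> " input, and the empty-input check appear in the hunk, so the use_gpu flag, the numeric choices, and the error message are illustrative assumptions rather than the project's actual code.

# Minimal sketch of the selection loop this hunk feeds into.
# Assumptions not shown in the hunk: use_gpu, the "1"/"2" handling,
# and the invalid-selection message.
has_cuda = True  # stand-in for vars.hascuda

if has_cuda:
    print("Use GPU or CPU for generation?: (Default GPU)\n")
    print(" 1 - GPU\n 2 - CPU\n")
    genselected = False  # initialised only when the prompt will actually run
    while not genselected:  # mirrors while(genselected == False) in the hunk
        genselect = input("Mode> ")
        if genselect == "":      # empty input keeps the default (GPU)
            use_gpu = True
            genselected = True
        elif genselect == "1":   # explicit GPU choice (assumed)
            use_gpu = True
            genselected = True
        elif genselect == "2":   # explicit CPU choice (assumed)
            use_gpu = False
            genselected = True
        else:
            print("Please enter a valid selection.")

Keeping the initialisation next to the prompt means a selection made by an earlier branch is not clobbered before this loop runs.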