Mirror of https://github.com/KoboldAI/KoboldAI-Client.git, synced 2025-02-12 17:50:45 +01:00
Merge branch 'united' into mkultra
Commit: cbab98cc23

aiserver.py (20 changed lines)
@@ -1483,22 +1483,22 @@ def get_model_info(model, directory=""):
 
 def get_layer_count(model, directory=""):
     if(model not in ["InferKit", "Colab", "API", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]):
-        if(vars.model == "GPT2Custom"):
-            model_config = open(vars.custmodpth + "/config.json", "r")
+        if(model == "GPT2Custom"):
+            with open(os.path.join(directory, "config.json"), "r") as f:
+                model_config = json.load(f)
         # Get the model_type from the config or assume a model type if it isn't present
         else:
+            if(directory):
+                model = directory
             from transformers import AutoConfig
-            if directory == "":
-                model_config = AutoConfig.from_pretrained(model, revision=vars.revision, cache_dir="cache")
             if(os.path.isdir(model.replace('/', '_'))):
                 model_config = AutoConfig.from_pretrained(model.replace('/', '_'), revision=vars.revision, cache_dir="cache")
             elif(os.path.isdir("models/{}".format(model.replace('/', '_')))):
                 model_config = AutoConfig.from_pretrained("models/{}".format(model.replace('/', '_')), revision=vars.revision, cache_dir="cache")
-            elif(os.path.isdir(vars.custmodpth.replace('/', '_'))):
-                model_config = AutoConfig.from_pretrained(vars.custmodpth.replace('/', '_'), revision=vars.revision, cache_dir="cache")
+            elif(os.path.isdir(directory)):
+                model_config = AutoConfig.from_pretrained(directory, revision=vars.revision, cache_dir="cache")
             else:
-                model_config = AutoConfig.from_pretrained(vars.custmodpth, revision=vars.revision, cache_dir="cache")
+                model_config = AutoConfig.from_pretrained(model, revision=vars.revision, cache_dir="cache")
         return utils.num_layers(model_config)
     else:
         return None
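For context, the lookup that the united branch brings in substitutes the supplied directory for the model id up front, then walks a fixed list of candidate locations before falling back to the Hugging Face hub. A minimal sketch of that order, mirroring the diff above (candidate_config_dir is an illustrative helper, not a function in the codebase):

import os

def candidate_config_dir(model, directory=""):
    # get_layer_count replaces the model id with the directory when one
    # is given, then tries these locations in order:
    # 1. a local folder named after the model id, with '/' replaced by '_'
    # 2. the same folder under models/
    # 3. the explicitly supplied directory
    if directory:
        model = directory
    safe_name = model.replace('/', '_')
    for path in (safe_name, "models/{}".format(safe_name), directory):
        if path and os.path.isdir(path):
            return path
    return model  # bare id: AutoConfig resolves it via the Hugging Face hub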
colab/TPU.ipynb

@@ -66,7 +66,7 @@
 "#@title <b><-- Select your model below and then click this to start KoboldAI</b>\n",
 "#@markdown You can find a description of the models below along with instructions on how to start KoboldAI.\n",
 "\n",
-"Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
+"Model = \"Nerys 13B V2\" #@param [\"Nerys 13B V2\", \"Janeway 13B\", \"Shinen 13B\", \"Skein 20B\", \"Skein 6B\", \"Janeway 6B\", \"Adventure 6B\", \"Shinen 6B\", \"Lit 6B\", \"NeoX 20B\", \"OPT 13B\", \"Fairseq Dense 13B\", \"GPT-J-6B\"] {allow-input: true}\n",
 "Version = \"Official\" #@param [\"Official\", \"United\"] {allow-input: true}\n",
 "Provider = \"Cloudflare\" #@param [\"Localtunnel\", \"Cloudflare\"]\n",
 "\n",
@@ -93,6 +93,10 @@
 " Model = \"KoboldAI/fairseq-dense-13B-Shinen\"\n",
 " path = \"\"\n",
 " download = \"\"\n",
+"elif Model == \"Skein 20B\":\n",
+" Model = \"KoboldAI/GPT-NeoX-20B-Skein\"\n",
+" path = \"\"\n",
+" download = \"\"\n",
 "elif Model == \"NeoX 20B\":\n",
 " Model = \"EleutherAI/gpt-neox-20b\"\n",
 " path = \"\"\n",
@@ -128,7 +132,7 @@
 "elif Model == \"GPT-J-6B\":\n",
 " Model = \"EleutherAI/gpt-j-6B\"\n",
 " path = \"\"\n",
-" download = \"\"\n",
+" download = \"\"\n",
 "else:\n",
 " path = \"\"\n",
 " download = \"\"\n",
@@ -225,4 +229,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 0
-}
+}
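The notebook change adds one entry to the user-facing model menu and one branch mapping it to a Hugging Face repo id. The pairs visible in this diff can be summarized as a lookup table (an illustrative sketch; the notebook itself uses an elif chain):

MODEL_IDS = {
    "Shinen 13B": "KoboldAI/fairseq-dense-13B-Shinen",
    "Skein 20B": "KoboldAI/GPT-NeoX-20B-Skein",  # added by this commit
    "NeoX 20B": "EleutherAI/gpt-neox-20b",
    "GPT-J-6B": "EleutherAI/gpt-j-6B",
}

def resolve_model(menu_choice):
    # Unknown labels pass through unchanged, matching the menu's
    # {allow-input: true} option for custom model ids.
    return MODEL_IDS.get(menu_choice, menu_choice)

assert resolve_model("Skein 20B") == "KoboldAI/GPT-NeoX-20B-Skein"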
utils.py (2 changed lines)
@@ -170,7 +170,7 @@ def decodenewlines(txt):
 # Returns number of layers given an HF model config
 #==================================================================#
 def num_layers(config):
-    return config.num_layers if hasattr(config, "num_layers") else config.n_layer if hasattr(config, "n_layer") else config.num_hidden_layers if hasattr(config, 'num_hidden_layers') else None
+    return config["n_layer"] if isinstance(config, dict) else config.num_layers if hasattr(config, "num_layers") else config.n_layer if hasattr(config, "n_layer") else config.num_hidden_layers if hasattr(config, 'num_hidden_layers') else None
 
 #==================================================================#
 # Downloads huggingface checkpoints using aria2c if possible
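The new isinstance(config, dict) branch pairs with the aiserver.py change above: for GPT2Custom the caller now passes the raw json.load() result rather than a transformers config object. A minimal sketch of both call shapes (FakeConfig and the layer counts are made up for illustration):

def num_layers(config):
    # Dict: a raw config.json loaded with json.load (the GPT2Custom path).
    if isinstance(config, dict):
        return config["n_layer"]
    # Object: HF config classes name the layer count differently per family.
    for attr in ("num_layers", "n_layer", "num_hidden_layers"):
        if hasattr(config, attr):
            return getattr(config, attr)
    return None

class FakeConfig:  # stands in for a transformers.AutoConfig result
    num_hidden_layers = 28

assert num_layers({"n_layer": 48}) == 48  # dict from a local config.json
assert num_layers(FakeConfig()) == 28     # attribute-style config object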