Breakmodel working now with the web UI

This commit is contained in:
ebolam 2022-03-07 11:27:23 -05:00
parent 5e00f7daf0
commit 123cd45b0e
3 changed files with 110 additions and 14 deletions

View File

@ -800,13 +800,40 @@ def general_startup():
#==================================================================#
# Load Model
#==================================================================#
def load_model(use_gpu=True, key=''):
def get_layer_count(model, directory=""):
    """Return the number of transformer layers for *model*, or None.

    The config is resolved from, in order of preference: an explicit
    ``directory``, the local ``models/`` folder, or the Hugging Face hub
    model id.  Returns ``None`` for API-backed/special backends that have
    no local config, when the config cannot be loaded, or when the config
    does not expose a ``num_layers`` attribute.
    """
    # API-backed / special backends have no transformers config to inspect.
    if model in ["InferKit", "Colab", "OAI", "GooseAI", "ReadOnly", "TPUMeshTransformerGPTJ"]:
        return None
    from transformers import AutoConfig
    # Resolve where the config lives: explicit dir > local models/ dir > hub id.
    local_dir = "models/{}".format(model.replace("/", "_"))
    if os.path.isdir(directory):
        location = directory
    elif os.path.isdir(local_dir):
        location = local_dir
    else:
        location = model
    try:
        model_config = AutoConfig.from_pretrained(location, cache_dir="cache/")
    except ValueError:
        # Unrecognized / unloadable config: caller hides the layer UI.
        return None
    # Not every architecture exposes num_layers; fall back to None.
    return getattr(model_config, "num_layers", None)
def load_model(use_gpu=True, key='', gpu_layers=None):
global model
global generator
vars.noai = False
set_aibusy(True)
print("Model: ".format(vars.model))
print("args.path: ".format(args.path))
if gpu_layers is not None:
args.breakmodel_gpulayers = gpu_layers
# If transformers model was selected & GPU available, ask to use CPU or GPU
if(vars.model not in ["InferKit", "Colab", "OAI", "GooseAI" , "ReadOnly", "TPUMeshTransformerGPTJ"]):
vars.allowsp = True
@ -2536,15 +2563,20 @@ def get_message(msg):
elif(msg['cmd'] == 'list_model'):
sendModelSelection(menu=msg['data'])
elif(msg['cmd'] == 'load_model'):
load_model(use_gpu=msg['use_gpu'], key=msg['key'])
load_model(use_gpu=msg['use_gpu'], key=msg['key'], gpu_layers=msg['gpu_layers'])
elif(msg['cmd'] == 'selectmodel'):
if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg:
sendModelSelection(menu=msg['data'])
vars.model = msg['data']
if 'path' in msg:
args.path = msg['path']
print(vars.model)
print(args.path)
layers = get_layer_count(vars.model, directory=msg['path'])
else:
layers = get_layer_count(vars.model)
if layers is not None:
emit('from_server', {'cmd': 'show_layer_bar', 'data': layers, 'gpu_count': torch.cuda.device_count()}, broadcast=True)
else:
emit('from_server', {'cmd': 'hide_layer_bar'}, broadcast=True)
elif(msg['cmd'] == 'loadselect'):
vars.loadselect = msg["data"]
elif(msg['cmd'] == 'spselect'):

View File

@ -940,7 +940,6 @@ function hideUSPopup() {
function buildLoadModelList(ar, menu) {
disableButtons([load_model_accept]);
loadmodelcontent.html("");
console.log(menu);
var i;
for(i=0; i<ar.length; i++) {
var html
@ -970,7 +969,6 @@ function buildLoadModelList(ar, menu) {
return function () {
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name"), 'path': $(this).attr("pretty_name")});
highlightLoadLine($(this));
enableButtons([load_model_accept]);
}
})(i));
} else {
@ -978,7 +976,6 @@ function buildLoadModelList(ar, menu) {
return function () {
socket.send({'cmd': 'selectmodel', 'data': $(this).attr("name")});
highlightLoadLine($(this));
enableButtons([load_model_accept]);
}
})(i));
}
@ -1120,6 +1117,7 @@ function buildUSList(unloaded, loaded) {
// Mark `ref` as the currently selected entry, first clearing any previous
// selection in both the story-load and model-load popup lists.
function highlightLoadLine(ref) {
	var lists = ["#loadlistcontent", "#loadmodellistcontent"];
	lists.forEach(function (root) {
		$(root + " > div > div.popuplistselected").removeClass("popuplistselected");
	});
	ref.addClass("popuplistselected");
}
@ -1825,6 +1823,21 @@ function unbindGametext() {
gametext_bound = false;
}
// Recompute the total layer allocation from the per-GPU sliders and refresh
// the running total in the load-model popup.  When the total exceeds the
// model's maximum layer count the Load button is disabled and the total is
// rendered in red; otherwise the button is re-enabled.
function update_gpu_layers() {
	var total = 0;
	var gpus = $("#gpu_count")[0].value;
	for (var idx = 0; idx < gpus; idx++) {
		total += parseInt($("#gpu_layers" + idx)[0].value);
	}
	var max_layers = parseInt(document.getElementById("gpu_layers_max").innerHTML);
	if (total > max_layers) {
		disableButtons([load_model_accept]);
		$("#gpu_layers_current").html("<span style='color: red'>" + total + "</span>");
	} else {
		enableButtons([load_model_accept]);
		$("#gpu_layers_current").html(total);
	}
}
//=================================================================//
// READY/RUNTIME
//=================================================================//
@ -2382,16 +2395,30 @@ $(document).ready(function(){
}
} else if(msg.cmd == 'show_model_menu') {
if(msg.menu == 'gpt2list') {
$("#use_gpu_div").removeClass("hidden")
$("#use_gpu_div").removeClass("hidden");
} else {
$("#use_gpu_div").addClass("hidden")
$("#use_gpu_div").addClass("hidden");
}
if(msg.menu == 'apilist') {
$("#modelkey").removeClass("hidden")
$("#modelkey").removeClass("hidden");
} else {
$("#modelkey").addClass("hidden")
$("#modelkey").addClass("hidden");
}
buildLoadModelList(msg.data, msg.menu);
} else if(msg.cmd == 'show_layer_bar') {
var html;
$("#modellayers").removeClass("hidden");
html = "";
for (let i=0; i < msg.gpu_count; i++) {
html += "GPU " + i + ": <input type='range' class='form-range airange' min='0' max='"+msg.data+"' step='1' value='"+msg.data+"' id='gpu_layers"+i+"' onchange='update_gpu_layers();'>";
}
$("#model_layer_bars").html(html);
$("#gpu_layers_max").html(msg.data);
$("#gpu_count")[0].value = msg.gpu_count;
update_gpu_layers();
} else if(msg.cmd == 'hide_layer_bar') {
$("#modellayers").addClass("hidden");
enableButtons([load_model_accept]);
}
});
@ -2603,7 +2630,19 @@ $(document).ready(function(){
// Send the load_model command with the user's GPU-layer split.
// gpu_layers is a comma-separated count per GPU (e.g. "12,12"), or the
// empty string when the layer bar is not applicable to the model.
load_model_accept.on("click", function(ev) {
	hideMessage();
	var gpu_layers = "";
	// The layer bar's visibility is toggled via the "hidden" CSS class
	// (see show_layer_bar/hide_layer_bar), not the DOM `hidden` property,
	// so test the class rather than the element property.
	if (!$("#modellayers").hasClass("hidden")) {
		for (let i = 0; i < $("#gpu_count")[0].value; i++) {
			gpu_layers += $("#gpu_layers" + i)[0].value + ",";
		}
	}
	// slice(0, -1) drops the trailing comma (yields "" when no GPUs listed).
	socket.send({'cmd': 'load_model', 'use_gpu': $('#use_gpu')[0].checked, 'key': $('#modelkey')[0].value, 'gpu_layers': gpu_layers.slice(0, -1)});
	loadmodelcontent.html("");
	hideLoadModelPopup();
});

View File

@ -284,6 +284,31 @@
<div class="popupfooter">
<input class="form-control hidden" type="text" placeholder="key" id="modelkey"><br>
</div>
<!-- Per-GPU layer assignment controls (breakmodel). Shown/hidden by the
     show_layer_bar / hide_layer_bar server messages; the slider markup is
     injected into #model_layer_bars at runtime. All attribute values are
     quoted for valid, consistent HTML. -->
<div class="popupfooter hidden" id="modellayers">
	<div class="settingitem" style="width:100%">
		<div class="settinglabel">
			<div class="justifyleft">
				GPU Layers
				<span class="helpicon">?
					<span class="helptext">Number of layers to assign to the GPU</span>
				</span>
			</div>
			<!-- Running total of layers across all GPU sliders -->
			<div class="justifyright" id="gpu_layers_current">0</div>
		</div>
		<div id="model_layer_bars" style="color: white">
		</div>
		<!-- Number of GPUs detected; set by the show_layer_bar handler -->
		<input type="hidden" id="gpu_count" value="0"/>
		<div class="settingminmax">
			<div class="justifyleft">
				0
			</div>
			<!-- Maximum layer count for the selected model -->
			<div class="justifyright" id="gpu_layers_max">
				24
			</div>
		</div>
	</div>
</div>
<div class="popupfooter">
<button type="button" class="btn btn-primary" id="btn_loadmodelaccept">Load</button>
<button type="button" class="btn btn-primary" id="btn_loadmodelclose">Cancel</button>