Merge pull request #147 from ebolam/Web-UI

Layer input box
This commit is contained in:
henk717 2022-06-15 00:24:58 +02:00 committed by GitHub
commit c4b2bcde4b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 118 additions and 19 deletions

View File

@ -281,7 +281,7 @@ class vars:
colaburl = "" # Ngrok url for Google Colab mode
apikey = "" # API key to use for InferKit API calls
oaiapikey = "" # API key to use for OpenAI API calls
savedir = getcwd()+"\stories"
savedir = getcwd()+"\\stories"
hascuda = False # Whether torch has detected CUDA on the system
usegpu = False # Whether to launch pipeline with GPU support
custmodpth = "" # Filesystem location of custom model to run
@ -434,13 +434,15 @@ def getModelSelection(modellist):
getModelSelection(mainmenu)
def check_if_dir_is_model(path):
try:
from transformers import AutoConfig
model_config = AutoConfig.from_pretrained(path, revision=vars.revision, cache_dir="cache")
except:
if os.path.exists(path):
try:
from transformers import AutoConfig
model_config = AutoConfig.from_pretrained(path, revision=vars.revision, cache_dir="cache")
except:
return False
return True
else:
return False
return True
#==================================================================#
# Return all keys in tokenizer dictionary containing char
@ -1072,6 +1074,10 @@ def get_model_info(model, directory=""):
else:
break_values = [layer_count]
break_values += [0] * (gpu_count - len(break_values))
#print("Model_info: {}".format({'cmd': 'selected_model_info', 'key_value': key_value, 'key':key,
# 'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
# 'break_values': break_values, 'gpu_count': gpu_count,
# 'url': url, 'gpu_names': gpu_names}))
emit('from_server', {'cmd': 'selected_model_info', 'key_value': key_value, 'key':key,
'gpu':gpu, 'layer_count':layer_count, 'breakmodel':breakmodel,
'break_values': break_values, 'gpu_count': gpu_count,
@ -3052,12 +3058,24 @@ def get_message(msg):
# The data variable will contain the model name. But our Custom lines need a bit more processing
# If we're on a custom line that we have selected a model for, the path variable will be in msg
# so if that's missing we need to run the menu to show the model folders in the models folder
if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg:
if msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path' not in msg and 'path_modelname' not in msg:
if 'folder' not in msg:
folder = "./models"
else:
folder = msg['folder']
sendModelSelection(menu=msg['data'], folder=folder)
elif msg['data'] in ('NeoCustom', 'GPT2Custom') and 'path_modelname' in msg:
#Here the user entered custom text in the text box. This could be either a model name or a path.
if check_if_dir_is_model(msg['path_modelname']):
vars.model = msg['data']
vars.custmodpth = msg['path_modelname']
get_model_info(msg['data'], directory=msg['path'])
else:
vars.model = msg['path_modelname']
try:
get_model_info(vars.model)
except:
emit('from_server', {'cmd': 'errmsg', 'data': "The model entered doesn't exist."})
elif msg['data'] in ('NeoCustom', 'GPT2Custom'):
if check_if_dir_is_model(msg['path']):
vars.model = msg['data']
@ -3160,7 +3178,7 @@ def sendUSStatItems():
# KoboldAI Markup Formatting (Mixture of Markdown and sanitized html)
#==================================================================#
def kml(txt):
txt = txt.replace('\>', '>')
txt = txt.replace('\\>', '>')
txt = bleach.clean(markdown.markdown(txt), tags = ['p', 'em', 'strong', 'code', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li', 'ul', 'b', 'i', 'a', 'span', 'button'], styles = ['color', 'font-weight'], attributes=['id', 'class', 'style', 'href'])
return txt
@ -3184,6 +3202,7 @@ def setStartState():
#==================================================================#
def sendsettings():
# Send settings for selected AI type
emit('from_server', {'cmd': 'reset_menus'})
if(vars.model != "InferKit"):
for set in gensettings.gensettingstf:
emit('from_server', {'cmd': 'addsetting', 'data': set})
@ -5195,6 +5214,7 @@ def loadRequest(loadpath, filename=None):
vars.lastact = ""
vars.submission = ""
vars.lastctx = ""
vars.genseqs = []
del vars.actions
vars.actions = structures.KoboldStoryRegister()
@ -5456,7 +5476,7 @@ def importgame():
vars.importjs = {}
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@ -5538,7 +5558,7 @@ def importAidgRequest(id):
vars.worldinfo_i = [wi for wi in vars.worldinfo if wi["init"]]
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@ -5631,7 +5651,7 @@ def newGameRequest():
vars.lastctx = ""
# Reset current save
vars.savedir = getcwd()+"\stories"
vars.savedir = getcwd()+"\\stories"
# Refresh game screen
vars.laststory = None
@ -5769,7 +5789,7 @@ if __name__ == "__main__":
while attempts < 10:
try:
cloudflare = str(localtunnel.stdout.readline())
cloudflare = (re.search("(?P<url>https?:\/\/[^\s]+loca.lt)", cloudflare).group("url"))
cloudflare = (re.search("(?P<url>https?://[^\s]+loca.lt)", cloudflare).group("url"))
break
except:
attempts += 1

View File

@ -1,2 +1,2 @@
[pytest]
addopts = --ignore=miniconda3 --html=unit_test_report.html -v
addopts = --ignore=miniconda3 --html=unit_test_report.html --self-contained-html -v

View File

@ -151,6 +151,12 @@ function getThrottle(ms) {
}
}
// Empty out the dynamically-populated Settings, Formatting and
// World Info menus so they can be rebuilt from server messages.
function reset_menus() {
	[settings_menu, format_menu, wi_menu].forEach(function (menu) {
		menu.html("");
	});
}
function addSetting(ob) {
// Add setting block to Settings Menu
if(ob.uitype == "slider"){
@ -876,6 +882,7 @@ function formatChunkInnerText(chunk) {
}
function dosubmit(disallow_abort) {
submit_start = Date.now();
var txt = input_text.val().replace(/\u00a0/g, " ");
if((disallow_abort || gamestate !== "wait") && !memorymode && !gamestarted && ((!adventure || !action_mode) && txt.trim().length == 0)) {
return;
@ -995,6 +1002,7 @@ function buildLoadModelList(ar, menu, breadcrumbs) {
disableButtons([load_model_accept]);
loadmodelcontent.html("");
$("#loadmodellistbreadcrumbs").html("");
$("#custommodelname").addClass("hidden");
var i;
for(i=0; i<breadcrumbs.length; i++) {
$("#loadmodellistbreadcrumbs").append("<button class=\"breadcrumbitem\" id='model_breadcrumbs"+i+"' name='"+ar[0][1]+"' value='"+breadcrumbs[i][0]+"'>"+breadcrumbs[i][1]+"</button><font color=white>\\</font>");
@ -1049,6 +1057,8 @@ function buildLoadModelList(ar, menu, breadcrumbs) {
highlightLoadLine($(this));
}
})(i));
$("#custommodelname").removeClass("hidden");
$("#custommodelname")[0].setAttribute("menu", menu);
//Normal load
} else {
$("#loadmodel"+i).off("click").on("click", (function () {
@ -1908,13 +1918,14 @@ function update_gpu_layers() {
gpu_layers = 0;
for (let i=0; i < $("#gpu_count")[0].value; i++) {
gpu_layers += parseInt($("#gpu_layers"+i)[0].value);
$("#gpu_layers_box_"+i)[0].value=$("#gpu_layers"+i)[0].value;
}
if (gpu_layers > parseInt(document.getElementById("gpu_layers_max").innerHTML)) {
disableButtons([load_model_accept]);
$("#gpu_layers_current").html("<span style='color: red'>"+gpu_layers+"/"+ document.getElementById("gpu_layers_max").innerHTML +"</span>");
} else {
enableButtons([load_model_accept]);
$("#gpu_layers_current").html(gpu_layers);
$("#gpu_layers_current").html(gpu_layers+"/"+document.getElementById("gpu_layers_max").innerHTML);
}
}
@ -2031,9 +2042,7 @@ $(document).ready(function(){
connect_status.removeClass("color_orange");
connect_status.addClass("color_green");
// Reset Menus
settings_menu.html("");
format_menu.html("");
wi_menu.html("");
reset_menus();
// Set up "Allow Editing"
$('body').on('input', autofocus);
$('#allowediting').prop('checked', allowedit).prop('disabled', false).change().off('change').on('change', function () {
@ -2095,6 +2104,10 @@ $(document).ready(function(){
scrollToBottom();
} else if(msg.cmd == "updatechunk") {
hideMessage();
if (typeof submit_start !== 'undefined') {
$("#runtime")[0].innerHTML = `Generation time: ${Math.round((Date.now() - submit_start)/1000)} sec`;
delete submit_start;
}
var index = msg.data.index;
var html = msg.data.html;
var existingChunk = game_text.children('#n' + index);
@ -2292,6 +2305,8 @@ $(document).ready(function(){
} else if(msg.cmd == "setanotetemplate") {
// Set contents of Author's Note Template field
$("#anotetemplate").val(msg.data);
} else if(msg.cmd == "reset_menus") {
reset_menus();
} else if(msg.cmd == "addsetting") {
// Add setting controls
addSetting(msg.data);
@ -2529,7 +2544,10 @@ $(document).ready(function(){
$("#modellayers").removeClass("hidden");
html = "";
for (let i = 0; i < msg.gpu_names.length; i++) {
html += "GPU " + i + " " + msg.gpu_names[i] + ": <input type='range' class='form-range airange' min='0' max='"+msg.layer_count+"' step='1' value='"+msg.break_values[i]+"' id='gpu_layers"+i+"' onchange='update_gpu_layers();'>";
html += "GPU " + i + " " + msg.gpu_names[i] + ": ";
html += '<input inputmode="numeric" id="gpu_layers_box_'+i+'" class="justifyright flex-push-right model_layers" value="'+msg.break_values[i]+'" ';
html += 'onblur=\'$("#gpu_layers'+i+'")[0].value=$("#gpu_layers_box_'+i+'")[0].value;update_gpu_layers();\'>';
html += "<input type='range' class='form-range airange' min='0' max='"+msg.layer_count+"' step='1' value='"+msg.break_values[i]+"' id='gpu_layers"+i+"' onchange='update_gpu_layers();'>";
}
$("#model_layer_bars").html(html);
$("#gpu_layers_max").html(msg.layer_count);

View File

@ -1464,3 +1464,14 @@ body.connected .popupfooter, .popupfooter.always-available {
overflow: hidden;
font-size: 12pt;
}
/* Numeric input box paired with each per-GPU layer slider: sized to a few
   characters and stripped of chrome so it blends into the layer-bar row. */
.model_layers {
width: 3ch;
background-color: inherit;
border: none;
outline: none;
}
/* Lighten the text while the box is being edited so the active field is visible. */
.model_layers:focus {
color: #cdf;
}

View File

@ -93,6 +93,7 @@
</div>
<div id="connectstatusdiv" class="flex-row-container">
<span id="connectstatus" class="color_orange flex-row">Waiting for connection...</span>
<div class="layer-container status-container flex-push-left" style="color: #FFFFFF;" id="runtime"></div>
<div class="layer-container status-container flex-push-right">
<span class="oi oi-puzzle-piece statusicon layer-bottom" aria-hidden="true">
<div class="statustext statustext-wide">
@ -288,6 +289,7 @@
<div class="popupfooter">
<input class="form-control hidden" type="text" placeholder="key" id="modelkey" onblur="socket.send({'cmd': 'OAI_Key_Update', 'key': $('#modelkey')[0].value});">
<input class="form-control hidden" type="text" placeholder="Enter the URL of the server (For example a trycloudflare link)" id="modelurl" onchange="check_enable_model_load()">
<input class="form-control hidden" type="text" placeholder="Model Path or Hugging Face Name" id="custommodelname" menu="" onblur="socket.send({'cmd': 'selectmodel', 'data': $(this).attr('menu'), 'path_modelname': $('#custommodelname')[0].value});">
</div>
<div class="popupfooter">
<select class="form-control hidden" id="oaimodel"><option value="">Select OAI Model</option></select>

View File

@ -150,6 +150,10 @@ def test_load_model_from_web_ui(client_data, model, expected_load_options):
generate_story_data(client_data)
def test_load_GooseAI_from_web_ui(client_data):
    # GooseAI is a remote API-key-backed backend that cannot be exercised
    # in the local test environment, so this configuration is skipped.
    pytest.skip("unsupported configuration")
@pytest.mark.parametrize("model, expected_load_options", test_models)
def test_load_model_from_command_line(client_data, model, expected_load_options):
(client, app, socketio_client) = client_data
@ -170,3 +174,47 @@ def test_load_model_from_command_line(client_data, model, expected_load_options)
generate_story_data(client_data)
def test_back_redo(client_data):
(client, app, socketio_client) = client_data
#Make sure we have known story in the ui
test_load_story_from_web_ui(client_data)
#Clear out any old messages
response = socketio_client.get_received()
#run a back action
socketio_client.emit('message',{'cmd': 'back', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'removechunk', 'data': 3}
#Run a redo action
socketio_client.emit('message',{'cmd': 'redo', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'updatechunk', 'data': {'index': 3, 'html': '<chunk n="3" id="n3" tabindex="-1"> where to find the chicken and then how to make off with it.<br/><br/>A soft thud caused Niko to quickly lift his head. Standing behind the stall where the butcher had been cutting his chicken,</chunk>'}}
#Go all the way back, then all the way forward
socketio_client.emit('message',{'cmd': 'back', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'removechunk', 'data': 3}
socketio_client.emit('message',{'cmd': 'back', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'removechunk', 'data': 2}
socketio_client.emit('message',{'cmd': 'back', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'removechunk', 'data': 1}
socketio_client.emit('message',{'cmd': 'back', 'data': ''})
response = socketio_client.get_received()[0]['args'][0]
assert response == {'cmd': 'errmsg', 'data': 'Cannot delete the prompt.'}
socketio_client.emit('message',{'cmd': 'redo', 'data': ''})
socketio_client.emit('message',{'cmd': 'redo', 'data': ''})
socketio_client.emit('message',{'cmd': 'redo', 'data': ''})
response = socketio_client.get_received()
assert response == [{'name': 'from_server', 'args': [{'cmd': 'updatescreen', 'gamestarted': True, 'data': '<chunk n="0" id="n0" tabindex="-1">Niko the kobold stalked carefully down the alley, his small scaly figure obscured by a dusky cloak that fluttered lightly in the cold winter breeze. Holding up his tail to keep it from dragging in the dirty snow that covered the cobblestone, he waited patiently for the butcher to turn his attention from his stall so that he could pilfer his next meal: a tender-looking</chunk><chunk n="1" id="n1" tabindex="-1"> chicken. He crouched just slightly as he neared the stall to ensure that no one was watching, not that anyone would be dumb enough to hassle a small kobold. What else was there for a lowly kobold to</chunk>'}], 'namespace': '/'}, {'name': 'from_server', 'args': [{'cmd': 'texteffect', 'data': 1}], 'namespace': '/'}, {'name': 'from_server', 'args': [{'cmd': 'updatechunk', 'data': {'index': 2, 'html': '<chunk n="2" id="n2" tabindex="-1"> do in a city? All that Niko needed to know was</chunk>'}}], 'namespace': '/'}, {'name': 'from_server', 'args': [{'cmd': 'texteffect', 'data': 2}], 'namespace': '/'}, {'name': 'from_server', 'args': [{'cmd': 'updatechunk', 'data': {'index': 3, 'html': '<chunk n="3" id="n3" tabindex="-1"> where to find the chicken and then how to make off with it.<br/><br/>A soft thud caused Niko to quickly lift his head. Standing behind the stall where the butcher had been cutting his chicken,</chunk>'}}], 'namespace': '/'}, {'name': 'from_server', 'args': [{'cmd': 'texteffect', 'data': 3}], 'namespace': '/'}]